Merge branch 'dev' into aranet-threshold-level

commit d567552c19
Author: Parker Brown, committed by GitHub on 2025-02-17 16:48:00 -07:00
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
151 changed files with 1871 additions and 528 deletions

Dockerfile (generated)

@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU
# Install uv
-RUN pip3 install uv==0.5.27
+RUN pip3 install uv==0.6.0
WORKDIR /usr/src


@@ -7,7 +7,7 @@ from dataclasses import dataclass
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONF_HOST,
    CONF_NAME,
@@ -123,12 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
    """Unload AdGuard Home config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        # This is the last loaded instance of AdGuard, deregister any services
        hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
        hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)

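Several hunks in this merge (AdGuard Home above, and the Google, Guardian, LookIn, Motion Blinds and NETGEAR LTE integrations below) replace a hand-rolled scan over async_entries(DOMAIN) with hass.config_entries.async_loaded_entries(DOMAIN). A minimal, hypothetical sketch of that unload pattern — the domain, platform list and service names here are placeholders, not any integration's real constants:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

DOMAIN = "example"  # placeholder domain, not a real integration
PLATFORMS: list = []  # platforms forwarded at setup time
SERVICES = ("add_url", "remove_url")  # hypothetical service names


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry and drop services with the last loaded entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    # async_loaded_entries() is used the same way as in the hunk above:
    # an empty result means no other loaded instance of the integration remains.
    if not hass.config_entries.async_loaded_entries(DOMAIN):
        for service_name in SERVICES:
            hass.services.async_remove(DOMAIN, service_name)
    return unload_ok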

@@ -6,6 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/airgradient",
  "integration_type": "device",
  "iot_class": "local_polling",
-  "requirements": ["airgradient==0.9.1"],
+  "requirements": ["airgradient==0.9.2"],
  "zeroconf": ["_airgradient._tcp.local."]
}


@@ -17,13 +17,13 @@ class BroadlinkEntity(Entity):
        self._device = device
        self._coordinator = device.update_manager.coordinator

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Call when the entity is added to hass."""
        self.async_on_remove(self._coordinator.async_add_listener(self._recv_data))
        if self._coordinator.data:
            self._update_state(self._coordinator.data)

-    async def async_update(self):
+    async def async_update(self) -> None:
        """Update the state of the entity."""
        await self._coordinator.async_request_refresh()
@@ -49,7 +49,7 @@ class BroadlinkEntity(Entity):
        """

    @property
-    def available(self):
+    def available(self) -> bool:
        """Return True if the entity is available."""
        return self._device.available


@@ -11,7 +11,11 @@ from typing import Any
from aiohttp import ClientError
from hass_nabucasa import Cloud, CloudError
from hass_nabucasa.api import CloudApiNonRetryableError
-from hass_nabucasa.cloud_api import async_files_delete_file, async_files_list
+from hass_nabucasa.cloud_api import (
+    FilesHandlerListEntry,
+    async_files_delete_file,
+    async_files_list,
+)
from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5

from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
@@ -76,11 +80,6 @@ class CloudBackupAgent(BackupAgent):
        self._cloud = cloud
        self._hass = hass

-    @callback
-    def _get_backup_filename(self) -> str:
-        """Return the backup filename."""
-        return f"{self._cloud.client.prefs.instance_id}.tar"
-
    async def async_download_backup(
        self,
        backup_id: str,
@@ -91,13 +90,13 @@ class CloudBackupAgent(BackupAgent):
        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
            raise BackupAgentError("Backup not found")

        try:
            content = await self._cloud.files.download(
                storage_type=StorageType.BACKUP,
-                filename=self._get_backup_filename(),
+                filename=backup["Key"],
            )
        except CloudError as err:
            raise BackupAgentError(f"Failed to download backup: {err}") from err
@@ -124,7 +123,7 @@ class CloudBackupAgent(BackupAgent):
            base64md5hash = await calculate_b64md5(open_stream, size)
        except FilesError as err:
            raise BackupAgentError(err) from err
-        filename = self._get_backup_filename()
+        filename = f"{self._cloud.client.prefs.instance_id}.tar"
        metadata = backup.as_dict()

        tries = 1
@@ -172,29 +171,34 @@ class CloudBackupAgent(BackupAgent):
        :param backup_id: The ID of the backup that was returned in async_list_backups.
        """
-        if not await self.async_get_backup(backup_id):
+        if not (backup := await self._async_get_backup(backup_id)):
            return

        try:
            await async_files_delete_file(
                self._cloud,
                storage_type=StorageType.BACKUP,
-                filename=self._get_backup_filename(),
+                filename=backup["Key"],
            )
        except (ClientError, CloudError) as err:
            raise BackupAgentError("Failed to delete backup") from err

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
+        """List backups."""
+        backups = await self._async_list_backups()
+        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+
+    async def _async_list_backups(self) -> list[FilesHandlerListEntry]:
        """List backups."""
        try:
            backups = await async_files_list(
                self._cloud, storage_type=StorageType.BACKUP
            )
-            _LOGGER.debug("Cloud backups: %s", backups)
        except (ClientError, CloudError) as err:
            raise BackupAgentError("Failed to list backups") from err
-        return [AgentBackup.from_dict(backup["Metadata"]) for backup in backups]
+        _LOGGER.debug("Cloud backups: %s", backups)
+        return backups

    async def async_get_backup(
        self,
@@ -202,10 +206,19 @@ class CloudBackupAgent(BackupAgent):
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""
-        backups = await self.async_list_backups()
+        if not (backup := await self._async_get_backup(backup_id)):
+            return None
+        return AgentBackup.from_dict(backup["Metadata"])
+
+    async def _async_get_backup(
+        self,
+        backup_id: str,
+    ) -> FilesHandlerListEntry | None:
+        """Return a backup."""
+        backups = await self._async_list_backups()
        for backup in backups:
-            if backup.backup_id == backup_id:
+            if backup["Metadata"]["backup_id"] == backup_id:
                return backup
        return None


@@ -2,16 +2,18 @@
from __future__ import annotations

+import asyncio
from collections.abc import Callable

from aiohttp import ClientError
from eheimdigital.device import EheimDigitalDevice
from eheimdigital.hub import EheimDigitalHub
-from eheimdigital.types import EheimDeviceType
+from eheimdigital.types import EheimDeviceType, EheimDigitalClientError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity_component import DEFAULT_SCAN_INTERVAL
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -43,12 +45,14 @@ class EheimDigitalUpdateCoordinator(
            name=DOMAIN,
            update_interval=DEFAULT_SCAN_INTERVAL,
        )
+        self.main_device_added_event = asyncio.Event()
        self.hub = EheimDigitalHub(
            host=self.config_entry.data[CONF_HOST],
            session=async_get_clientsession(hass),
            loop=hass.loop,
            receive_callback=self._async_receive_callback,
            device_found_callback=self._async_device_found,
+            main_device_added_event=self.main_device_added_event,
        )
        self.known_devices: set[str] = set()
        self.platform_callbacks: set[AsyncSetupDeviceEntitiesCallback] = set()
@@ -76,8 +80,17 @@ class EheimDigitalUpdateCoordinator(
        self.async_set_updated_data(self.hub.devices)

    async def _async_setup(self) -> None:
-        await self.hub.connect()
-        await self.hub.update()
+        try:
+            await self.hub.connect()
+            async with asyncio.timeout(2):
+                # This event gets triggered when the first message is received from
+                # the device, it contains the data necessary to create the main device.
+                # This removes the race condition where the main device is accessed
+                # before the response from the device is parsed.
+                await self.main_device_added_event.wait()
+            await self.hub.update()
+        except (TimeoutError, EheimDigitalClientError) as err:
+            raise ConfigEntryNotReady from err

    async def _async_update_data(self) -> dict[str, EheimDigitalDevice]:
        try:

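The eheimdigital hunk above waits on an asyncio.Event with a two-second timeout so setup fails cleanly instead of racing the first device message. A framework-free sketch of that idea, with a made-up FakeHub standing in for the real hub class:

import asyncio


class FakeHub:
    """Toy stand-in for a hub that reports its main device asynchronously."""

    def __init__(self, main_device_added_event: asyncio.Event) -> None:
        self._event = main_device_added_event

    async def connect(self) -> None:
        # Simulate the first device message arriving shortly after connect().
        asyncio.get_running_loop().call_later(0.1, self._event.set)

    async def update(self) -> None:
        print("updating, main device is known")


async def async_setup() -> None:
    main_device_added_event = asyncio.Event()
    hub = FakeHub(main_device_added_event)
    await hub.connect()
    try:
        # Bound the wait so a silent device fails setup instead of hanging forever.
        async with asyncio.timeout(2):
            await main_device_added_event.wait()
    except TimeoutError:
        raise RuntimeError("device never sent its first message") from None
    await hub.update()


asyncio.run(async_setup())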

@@ -498,7 +498,11 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle device found via zeroconf."""
-        host = discovery_info.host
+        host = (
+            f"[{discovery_info.ip_address}]"
+            if discovery_info.ip_address.version == 6
+            else str(discovery_info.ip_address)
+        )
        https_port = (
            int(discovery_info.port)
            if discovery_info.port is not None

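The Elmax hunk above wraps IPv6 literals in brackets before they are embedded in a URL. The rule itself is plain standard-library Python and can be checked in isolation (this is a sketch, not the integration's code):

from ipaddress import ip_address


def format_host(raw: str) -> str:
    """Return a host string safe to place into an http(s) URL."""
    addr = ip_address(raw)
    # IPv6 literals need brackets inside URLs; IPv4 addresses do not.
    return f"[{addr}]" if addr.version == 6 else str(addr)


assert format_host("192.168.1.10") == "192.168.1.10"
assert format_host("fe80::1") == "[fe80::1]"
print(f"https://{format_host('fe80::1')}:443/api")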

@@ -6,5 +6,5 @@
  "iot_class": "local_push",
  "loggers": ["sense_energy"],
  "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.4"]
+  "requirements": ["sense-energy==0.13.5"]
}


@@ -16,7 +16,7 @@ class EnOceanEntity(Entity):
        """Initialize the device."""
        self.dev_id = dev_id

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        self.async_on_remove(
            async_dispatcher_connect(


@@ -16,7 +16,7 @@
  "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
  "mqtt": ["esphome/discover/#"],
  "requirements": [
-    "aioesphomeapi==29.0.2",
+    "aioesphomeapi==29.1.0",
    "esphome-dashboard-api==1.2.3",
    "bleak-esphome==2.7.1"
  ],


@@ -47,6 +47,10 @@ async def async_setup_entry(
    )

+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
class FlexitBinarySensor(FlexitEntity, BinarySensorEntity):
    """Representation of a Flexit binary Sensor."""


@@ -25,6 +25,7 @@ from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import (
+    DOMAIN,
    MAX_TEMP,
    MIN_TEMP,
    PRESET_TO_VENTILATION_MODE_MAP,
@@ -43,6 +44,9 @@ async def async_setup_entry(
    async_add_entities([FlexitClimateEntity(config_entry.runtime_data)])

+PARALLEL_UPDATES = 1
+
class FlexitClimateEntity(FlexitEntity, ClimateEntity):
    """Flexit air handling unit."""
@@ -130,7 +134,13 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
        try:
            await self.device.set_ventilation_mode(ventilation_mode)
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_preset_mode",
+                translation_placeholders={
+                    "preset": str(ventilation_mode),
+                },
+            ) from exc
        finally:
            await self.coordinator.async_refresh()
@@ -150,6 +160,12 @@ class FlexitClimateEntity(FlexitEntity, ClimateEntity):
            else:
                await self.device.set_ventilation_mode(VENTILATION_MODE_HOME)
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_hvac_mode",
+                translation_placeholders={
+                    "mode": str(hvac_mode),
+                },
+            ) from exc
        finally:
            await self.coordinator.async_refresh()


@@ -49,7 +49,11 @@ class FlexitCoordinator(DataUpdateCoordinator[FlexitBACnet]):
            await self.device.update()
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
            raise ConfigEntryNotReady(
-                f"Timeout while connecting to {self.config_entry.data[CONF_IP_ADDRESS]}"
+                translation_domain=DOMAIN,
+                translation_key="not_ready",
+                translation_placeholders={
+                    "ip": str(self.config_entry.data[CONF_IP_ADDRESS]),
+                },
            ) from exc

        return self.device


@@ -6,5 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/flexit_bacnet",
  "integration_type": "device",
  "iot_class": "local_polling",
+  "quality_scale": "bronze",
  "requirements": ["flexit_bacnet==2.2.3"]
}


@@ -18,6 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

+from .const import DOMAIN
from .coordinator import FlexitConfigEntry, FlexitCoordinator
from .entity import FlexitEntity
@@ -205,6 +206,9 @@ async def async_setup_entry(
    )

+PARALLEL_UPDATES = 1
+
class FlexitNumber(FlexitEntity, NumberEntity):
    """Representation of a Flexit Number."""
@@ -246,6 +250,12 @@ class FlexitNumber(FlexitEntity, NumberEntity):
        try:
            await set_native_value_fn(int(value))
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="set_value_error",
+                translation_placeholders={
+                    "value": str(value),
+                },
+            ) from exc
        finally:
            await self.coordinator.async_refresh()


@@ -0,0 +1,91 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
Integration does not define custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not use any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities don't subscribe to events explicitly
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup:
status: done
comment: |
Done implicitly with `await coordinator.async_config_entry_first_refresh()`.
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
Integration does not use options flow.
docs-installation-parameters: done
entity-unavailable:
status: done
comment: |
Done implicitly with coordinator.
integration-owner: done
log-when-unavailable:
status: done
comment: |
Done implicitly with coordinator.
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
Integration doesn't require any form of authentication.
test-coverage: todo
# Gold
entity-translations: done
entity-device-class: done
devices: done
entity-category: todo
entity-disabled-by-default: todo
discovery: todo
stale-devices:
status: exempt
comment: |
Device type integration.
diagnostics: todo
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
dynamic-devices:
status: exempt
comment: |
Device type integration.
discovery-update-info: todo
repair-issues:
status: exempt
comment: |
This is not applicable for this integration.
docs-use-cases: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-data-update: done
docs-known-limitations: todo
docs-troubleshooting: todo
docs-examples: todo
# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: done


@@ -161,6 +161,10 @@ async def async_setup_entry(
    )

+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+
class FlexitSensor(FlexitEntity, SensorEntity):
    """Representation of a Flexit (bacnet) Sensor."""


@@ -5,6 +5,10 @@
        "data": {
          "ip_address": "[%key:common::config_flow::data::ip%]",
          "device_id": "[%key:common::config_flow::data::device%]"
+        },
+        "data_description": {
+          "ip_address": "The IP address of the Flexit Nordic device",
+          "device_id": "The device ID of the Flexit Nordic device"
        }
      }
    },
@@ -115,5 +119,22 @@
        "name": "Cooker hood mode"
      }
    }
+  },
+  "exceptions": {
+    "set_value_error": {
+      "message": "Failed setting the value {value}."
+    },
+    "switch_turn": {
+      "message": "Failed to turn the switch {state}."
+    },
+    "set_preset_mode": {
+      "message": "Failed to set preset mode {preset}."
+    },
+    "set_hvac_mode": {
+      "message": "Failed to set HVAC mode {mode}."
+    },
+    "not_ready": {
+      "message": "Timeout while connecting to {ip}."
+    }
  }
}


@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

+from .const import DOMAIN
from .coordinator import FlexitConfigEntry, FlexitCoordinator
from .entity import FlexitEntity
@@ -68,6 +69,9 @@ async def async_setup_entry(
    )

+PARALLEL_UPDATES = 1
+
class FlexitSwitch(FlexitEntity, SwitchEntity):
    """Representation of a Flexit Switch."""
@@ -94,19 +98,31 @@ class FlexitSwitch(FlexitEntity, SwitchEntity):
        return self.entity_description.is_on_fn(self.coordinator.data)

    async def async_turn_on(self, **kwargs: Any) -> None:
-        """Turn electric heater on."""
+        """Turn switch on."""
        try:
            await self.entity_description.turn_on_fn(self.coordinator.data)
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="switch_turn",
+                translation_placeholders={
+                    "state": "on",
+                },
+            ) from exc
        finally:
            await self.coordinator.async_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
-        """Turn electric heater off."""
+        """Turn switch off."""
        try:
            await self.entity_description.turn_off_fn(self.coordinator.data)
        except (asyncio.exceptions.TimeoutError, ConnectionError, DecodingError) as exc:
-            raise HomeAssistantError from exc
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="switch_turn",
+                translation_placeholders={
+                    "state": "off",
+                },
+            ) from exc
        finally:
            await self.coordinator.async_refresh()


@@ -45,10 +45,10 @@ class FloEntity(Entity):
        """Return True if device is available."""
        return self._device.available

-    async def async_update(self):
+    async def async_update(self) -> None:
        """Update Flo entity."""
        await self._device.async_request_refresh()

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        self.async_on_remove(self._device.async_add_listener(self.async_write_ha_state))


@@ -35,7 +35,7 @@
  "services": {
    "ptz": {
      "name": "PTZ",
-      "description": "Pan/Tilt action for Foscam camera.",
+      "description": "Moves a Foscam camera to a specified direction.",
      "fields": {
        "movement": {
          "name": "Movement",
@@ -49,7 +49,7 @@
    },
    "ptz_preset": {
      "name": "PTZ preset",
-      "description": "PTZ Preset action for Foscam camera.",
+      "description": "Moves a Foscam camera to a predefined position.",
      "fields": {
        "preset_name": {
          "name": "Preset name",


@@ -196,6 +196,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
        self.hass = hass
        self.host = host
        self.mesh_role = MeshRoles.NONE
+        self.mesh_wifi_uplink = False
        self.device_conn_type: str | None = None
        self.device_is_router: bool = False
        self.password = password
@@ -610,6 +611,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
                        ssid=interf.get("ssid", ""),
                        type=interf["type"],
                    )
+                    if interf["type"].lower() == "wlan" and interf[
+                        "name"
+                    ].lower().startswith("uplink"):
+                        self.mesh_wifi_uplink = True
+
                    if dr.format_mac(int_mac) == self.mac:
                        self.mesh_role = MeshRoles(node["mesh_role"])


@@ -207,8 +207,9 @@ async def async_all_entities_list(
    local_ip: str,
) -> list[Entity]:
    """Get a list of all entities."""

    if avm_wrapper.mesh_role == MeshRoles.SLAVE:
+        if not avm_wrapper.mesh_wifi_uplink:
+            return [*await _async_wifi_entities_list(avm_wrapper, device_friendly_name)]
        return []

    return [
@@ -565,6 +566,9 @@ class FritzBoxWifiSwitch(FritzBoxBaseSwitch):
        self._attributes = {}
        self._attr_entity_category = EntityCategory.CONFIG
+        self._attr_entity_registry_enabled_default = (
+            avm_wrapper.mesh_role is not MeshRoles.SLAVE
+        )
        self._network_num = network_num

        switch_info = SwitchInfo(


@@ -10,7 +10,7 @@ from google.oauth2.credentials import Credentials
import voluptuous as vol

from homeassistant.components import conversation
-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
from homeassistant.core import (
    HomeAssistant,
@@ -99,12 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    hass.data[DOMAIN].pop(entry.entry_id)
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        for service_name in hass.services.async_services_for_domain(DOMAIN):
            hass.services.async_remove(DOMAIN, service_name)


@@ -2,7 +2,7 @@
from __future__ import annotations

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, discovery
@@ -59,12 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -
async def async_unload_entry(hass: HomeAssistant, entry: GoogleMailConfigEntry) -> bool:
    """Unload a config entry."""
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        for service_name in hass.services.async_services_for_domain(DOMAIN):
            hass.services.async_remove(DOMAIN, service_name)


@@ -12,7 +12,7 @@ from gspread.exceptions import APIError
from gspread.utils import ValueInputOption
import voluptuous as vol

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
@@ -81,12 +81,7 @@ async def async_unload_entry(
    hass: HomeAssistant, entry: GoogleSheetsConfigEntry
) -> bool:
    """Unload a config entry."""
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        for service_name in hass.services.async_services_for_domain(DOMAIN):
            hass.services.async_remove(DOMAIN, service_name)


@@ -11,7 +11,7 @@ from aioguardian import Client
from aioguardian.errors import GuardianError
import voluptuous as vol

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    ATTR_DEVICE_ID,
    CONF_DEVICE_ID,
@@ -247,12 +247,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        # If this is the last loaded instance of Guardian, deregister any services
        # defined during integration setup:
        for service_name in SERVICES:


@@ -69,3 +69,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
async def async_unload_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_remove_config_entry_device(
+    hass: HomeAssistant, entry: HeosConfigEntry, device: dr.DeviceEntry
+) -> bool:
+    """Remove config entry from device if no longer present."""
+    return not any(
+        (domain, key)
+        for domain, key in device.identifiers
+        if domain == DOMAIN and int(key) in entry.runtime_data.heos.players
+    )

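The new HEOS async_remove_config_entry_device above allows removal only when none of the device's identifiers still maps to a connected player. Reduced to plain Python (placeholder domain and player IDs, not live HEOS data), the check is:

DOMAIN = "heos"  # placeholder for illustration


def can_remove_device(identifiers: set[tuple[str, str]], player_ids: set[int]) -> bool:
    """Return True when no identifier of the device maps to a current player."""
    return not any(
        domain == DOMAIN and int(key) in player_ids
        for domain, key in identifiers
    )


assert can_remove_device({("heos", "42")}, {1, 2}) is True
assert can_remove_device({("heos", "2")}, {1, 2}) is False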

@@ -16,6 +16,7 @@ from pyheos import (
    HeosError,
    HeosNowPlayingMedia,
    HeosOptions,
+    HeosPlayer,
    MediaItem,
    MediaType,
    PlayerUpdateResult,
@@ -58,6 +59,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
                credentials=credentials,
            )
        )
+        self._platform_callbacks: list[Callable[[Sequence[HeosPlayer]], None]] = []
        self._update_sources_pending: bool = False
        self._source_list: list[str] = []
        self._favorites: dict[int, MediaItem] = {}
@@ -124,6 +126,27 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
            self.async_update_listeners()

        return remove_listener

+    def async_add_platform_callback(
+        self, add_entities_callback: Callable[[Sequence[HeosPlayer]], None]
+    ) -> None:
+        """Add a callback to add entities for a platform."""
+        self._platform_callbacks.append(add_entities_callback)
+
+    def _async_handle_player_update_result(
+        self, update_result: PlayerUpdateResult
+    ) -> None:
+        """Handle a player update result."""
+        if update_result.added_player_ids and self._platform_callbacks:
+            new_players = [
+                self.heos.players[player_id]
+                for player_id in update_result.added_player_ids
+            ]
+            for add_entities_callback in self._platform_callbacks:
+                add_entities_callback(new_players)
+        if update_result.updated_player_ids:
+            self._async_update_player_ids(update_result.updated_player_ids)
+
    async def _async_on_auth_failure(self) -> None:
        """Handle when the user credentials are no longer valid."""
        assert self.config_entry is not None
@@ -147,8 +170,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
        """Handle a controller event, such as players or groups changed."""
        if event == const.EVENT_PLAYERS_CHANGED:
            assert data is not None
-            if data.updated_player_ids:
-                self._async_update_player_ids(data.updated_player_ids)
+            self._async_handle_player_update_result(data)
        elif (
            event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED)
            and not self._update_sources_pending
@@ -242,9 +264,7 @@ class HeosCoordinator(DataUpdateCoordinator[None]):
        except HeosError as error:
            _LOGGER.error("Unable to refresh players: %s", error)
            return
-        # After reconnecting, player_id may have changed
-        if player_updates.updated_player_ids:
-            self._async_update_player_ids(player_updates.updated_player_ids)
+        self._async_handle_player_update_result(player_updates)

    @callback
    def async_get_source_list(self) -> list[str]:

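The coordinator hunk above stores per-platform "add entities" callbacks and invokes them when added_player_ids turns up in a player update. A framework-free sketch of that wiring, with toy classes standing in for the pyheos types:

from collections.abc import Callable, Sequence


class ToyCoordinator:
    """Illustrative coordinator that reports newly discovered players."""

    def __init__(self) -> None:
        self.players: dict[int, str] = {1: "Kitchen"}
        self._platform_callbacks: list[Callable[[Sequence[str]], None]] = []

    def async_add_platform_callback(
        self, add_entities: Callable[[Sequence[str]], None]
    ) -> None:
        # Platforms register how to turn players into entities.
        self._platform_callbacks.append(add_entities)

    def handle_players_changed(self, added_ids: Sequence[int]) -> None:
        # Hand any newly added players to every registered platform callback.
        new_players = [self.players[pid] for pid in added_ids if pid in self.players]
        for add_entities in self._platform_callbacks:
            add_entities(new_players)


coordinator = ToyCoordinator()
coordinator.async_add_platform_callback(lambda players: print("adding", list(players)))
coordinator.players[2] = "Patio"
coordinator.handle_players_changed([2])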

@@ -2,7 +2,7 @@
from __future__ import annotations

-from collections.abc import Awaitable, Callable, Coroutine
+from collections.abc import Awaitable, Callable, Coroutine, Sequence
from datetime import datetime
from functools import reduce, wraps
from operator import ior
@@ -93,11 +93,16 @@ async def async_setup_entry(
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Add media players for a config entry."""
-    devices = [
-        HeosMediaPlayer(entry.runtime_data, player)
-        for player in entry.runtime_data.heos.players.values()
-    ]
-    async_add_entities(devices)
+
+    def add_entities_callback(players: Sequence[HeosPlayer]) -> None:
+        """Add entities for each player."""
+        async_add_entities(
+            [HeosMediaPlayer(entry.runtime_data, player) for player in players]
+        )
+
+    coordinator = entry.runtime_data
+    coordinator.async_add_platform_callback(add_entities_callback)
+    add_entities_callback(list(coordinator.heos.players.values()))

type _FuncType[**_P] = Callable[_P, Awaitable[Any]]


@@ -49,7 +49,7 @@ rules:
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
-  dynamic-devices: todo
+  dynamic-devices: done
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
@@ -57,8 +57,8 @@ rules:
  exception-translations: done
  icon-translations: done
  reconfiguration-flow: done
-  repair-issues: todo
-  stale-devices: todo
+  repair-issues: done
+  stale-devices: done
  # Platinum
  async-dependency: done
  inject-websession:


@@ -35,7 +35,7 @@ class SW16Entity(Entity):
        self.async_write_ha_state()

    @property
-    def available(self):
+    def available(self) -> bool:
        """Return True if entity is available."""
        return bool(self._client.is_connected)
@@ -44,7 +44,7 @@
        """Update availability state."""
        self.async_write_ha_state()

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Register update callback."""
        self._client.register_status_callback(
            self.handle_event_callback, self._device_port


@@ -62,7 +62,7 @@ class HMDevice(Entity):
        if self._state:
            self._state = self._state.upper()

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Load data init callbacks."""
        self._subscribe_homematic_events()
@@ -77,7 +77,7 @@ class HMDevice(Entity):
        return self._name

    @property
-    def available(self):
+    def available(self) -> bool:
        """Return true if device is available."""
        return self._available


@@ -54,7 +54,7 @@ class IHCEntity(Entity):
        self.ihc_note = ""
        self.ihc_position = ""

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Add callback for IHC changes."""
        _LOGGER.debug("Adding IHC entity notify event: %s", self.ihc_id)
        self.ihc_controller.add_notify_event(self.ihc_id, self.on_ihc_change, True)


@@ -109,7 +109,7 @@ class InsteonEntity(Entity):
        )
        self.async_write_ha_state()

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Register INSTEON update events."""
        _LOGGER.debug(
            "Tracking updates for device %s group %d name %s",
@@ -137,7 +137,7 @@ class InsteonEntity(Entity):
            )
        )

-    async def async_will_remove_from_hass(self):
+    async def async_will_remove_from_hass(self) -> None:
        """Unsubscribe to INSTEON update events."""
        _LOGGER.debug(
            "Remove tracking updates for device %s group %d name %s",


@@ -106,7 +106,7 @@ class ISYNodeEntity(ISYEntity):
        return getattr(self._node, TAG_ENABLED, True)

    @property
-    def extra_state_attributes(self) -> dict:
+    def extra_state_attributes(self) -> dict[str, Any]:
        """Get the state attributes for the device.

        The 'aux_properties' in the pyisy Node class are combined with the
@@ -189,7 +189,7 @@ class ISYProgramEntity(ISYEntity):
        self._actions = actions

    @property
-    def extra_state_attributes(self) -> dict:
+    def extra_state_attributes(self) -> dict[str, Any]:
        """Get the state attributes for the device."""
        attr = {}
        if self._actions:


@ -407,6 +407,12 @@
}, },
"power_level_for_location": { "power_level_for_location": {
"default": "mdi:radiator" "default": "mdi:radiator"
},
"cycle_count": {
"default": "mdi:counter"
},
"cycle_count_for_location": {
"default": "mdi:counter"
} }
} }
} }


@ -248,6 +248,24 @@ TEMPERATURE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
state_class=SensorStateClass.MEASUREMENT, state_class=SensorStateClass.MEASUREMENT,
translation_key=ThinQProperty.CURRENT_TEMPERATURE, translation_key=ThinQProperty.CURRENT_TEMPERATURE,
), ),
ThinQPropertyEx.ROOM_AIR_CURRENT_TEMPERATURE: SensorEntityDescription(
key=ThinQPropertyEx.ROOM_AIR_CURRENT_TEMPERATURE,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
translation_key=ThinQPropertyEx.ROOM_AIR_CURRENT_TEMPERATURE,
),
ThinQPropertyEx.ROOM_IN_WATER_CURRENT_TEMPERATURE: SensorEntityDescription(
key=ThinQPropertyEx.ROOM_IN_WATER_CURRENT_TEMPERATURE,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
translation_key=ThinQPropertyEx.ROOM_IN_WATER_CURRENT_TEMPERATURE,
),
ThinQPropertyEx.ROOM_OUT_WATER_CURRENT_TEMPERATURE: SensorEntityDescription(
key=ThinQPropertyEx.ROOM_OUT_WATER_CURRENT_TEMPERATURE,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
translation_key=ThinQPropertyEx.ROOM_OUT_WATER_CURRENT_TEMPERATURE,
),
} }
WATER_FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = { WATER_FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
ThinQProperty.USED_TIME: SensorEntityDescription( ThinQProperty.USED_TIME: SensorEntityDescription(
@ -341,6 +359,10 @@ TIMER_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
} }
WASHER_SENSORS: tuple[SensorEntityDescription, ...] = ( WASHER_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key=ThinQProperty.CYCLE_COUNT,
translation_key=ThinQProperty.CYCLE_COUNT,
),
RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE],
TIMER_SENSOR_DESC[TimerProperty.TOTAL], TIMER_SENSOR_DESC[TimerProperty.TOTAL],
TIMER_SENSOR_DESC[TimerProperty.RELATIVE_TO_START_WM], TIMER_SENSOR_DESC[TimerProperty.RELATIVE_TO_START_WM],
@ -470,6 +492,11 @@ DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] =
RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE], RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE],
), ),
DeviceType.STYLER: WASHER_SENSORS, DeviceType.STYLER: WASHER_SENSORS,
DeviceType.SYSTEM_BOILER: (
TEMPERATURE_SENSOR_DESC[ThinQPropertyEx.ROOM_AIR_CURRENT_TEMPERATURE],
TEMPERATURE_SENSOR_DESC[ThinQPropertyEx.ROOM_IN_WATER_CURRENT_TEMPERATURE],
TEMPERATURE_SENSOR_DESC[ThinQPropertyEx.ROOM_OUT_WATER_CURRENT_TEMPERATURE],
),
DeviceType.WASHCOMBO_MAIN: WASHER_SENSORS, DeviceType.WASHCOMBO_MAIN: WASHER_SENSORS,
DeviceType.WASHCOMBO_MINI: WASHER_SENSORS, DeviceType.WASHCOMBO_MINI: WASHER_SENSORS,
DeviceType.WASHER: WASHER_SENSORS, DeviceType.WASHER: WASHER_SENSORS,


@ -305,6 +305,15 @@
"current_temperature": { "current_temperature": {
"name": "Current temperature" "name": "Current temperature"
}, },
"room_air_current_temperature": {
"name": "Indoor temperature"
},
"room_in_water_current_temperature": {
"name": "Inlet temperature"
},
"room_out_water_current_temperature": {
"name": "Outlet temperature"
},
"temperature": { "temperature": {
"name": "Temperature" "name": "Temperature"
}, },
@ -848,6 +857,12 @@
}, },
"power_level_for_location": { "power_level_for_location": {
"name": "{location} power level" "name": "{location} power level"
},
"cycle_count": {
"name": "Cycles"
},
"cycle_count_for_location": {
"name": "{location} cycles"
} }
}, },
"select": { "select": {


@@ -19,7 +19,7 @@ from aiolookin import (
)
from aiolookin.models import UDPCommandType, UDPEvent

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
@@ -192,12 +192,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)

-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        manager: LookinUDPManager = hass.data[DOMAIN][UDP_MANAGER]
        await manager.async_stop()
    return unload_ok


@@ -18,7 +18,7 @@ class LupusecDevice(Entity):
        self._device = device
        self._attr_unique_id = device.device_id

-    def update(self):
+    def update(self) -> None:
        """Update automation state."""
        self._device.refresh()


@@ -63,7 +63,7 @@ class LutronCasetaEntity(Entity):
            info[ATTR_SUGGESTED_AREA] = area
        self._attr_device_info = info

-    async def async_added_to_hass(self):
+    async def async_added_to_hass(self) -> None:
        """Register callbacks."""
        self._smartbridge.add_subscriber(self.device_id, self.async_write_ha_state)


@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING
from motionblinds import AsyncMotionMulticast

-from homeassistant.config_entries import ConfigEntry, ConfigEntryState
+from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_HOST, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
@@ -124,12 +124,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
        multicast.Unregister_motion_gateway(config_entry.data[CONF_HOST])
        hass.data[DOMAIN].pop(config_entry.entry_id)

-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        # No motion gateways left, stop Motion multicast
        unsub_stop = hass.data[DOMAIN].pop(KEY_UNSUB_STOP)
        unsub_stop()


@@ -62,6 +62,7 @@ MODELS_V2 = [
    "RBR",
    "RBS",
    "RBW",
+    "RS",
    "LBK",
    "LBR",
    "CBK",


@@ -6,7 +6,6 @@ from aiohttp.cookiejar import CookieJar
import eternalegypt
from eternalegypt.eternalegypt import SMS

-from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
@@ -117,12 +116,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry) -
async def async_unload_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry) -> bool:
    """Unload a config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    loaded_entries = [
-        entry
-        for entry in hass.config_entries.async_entries(DOMAIN)
-        if entry.state == ConfigEntryState.LOADED
-    ]
-    if len(loaded_entries) == 1:
+    if not hass.config_entries.async_loaded_entries(DOMAIN):
        hass.data.pop(DOMAIN, None)
        for service_name in hass.services.async_services()[DOMAIN]:
            hass.services.async_remove(DOMAIN, service_name)


@@ -2,7 +2,7 @@
  "config": {
    "step": {
      "user": {
-        "description": "If a METAR station code is not specified, the latitude and longitude will be used to find the closest station. For now, an API Key can be anything. It is recommended to use a valid email address.",
+        "description": "If a METAR station code is not specified, the latitude and longitude will be used to find the closest station. For now, the API key can be anything. It is recommended to use a valid email address.",
        "title": "Connect to the National Weather Service",
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
@@ -30,12 +30,12 @@
  },
  "services": {
    "get_forecasts_extra": {
-      "name": "Get extra forecasts data.",
-      "description": "Get extra data for weather forecasts.",
+      "name": "Get extra forecasts data",
+      "description": "Retrieves extra data for weather forecasts.",
      "fields": {
        "type": {
          "name": "Forecast type",
-          "description": "Forecast type: hourly or twice_daily."
+          "description": "The scope of the weather forecast."
        }
      }
    }


@ -6,6 +6,9 @@
} }
}, },
"number": { "number": {
"preconditioning_duration": {
"default": "mdi:fan-clock"
},
"target_percentage": { "target_percentage": {
"default": "mdi:battery-heart" "default": "mdi:battery-heart"
} }


@ -6,7 +6,7 @@ from dataclasses import dataclass
from ohme import ApiException, OhmeApiClient from ohme import ApiException, OhmeApiClient
from homeassistant.components.number import NumberEntity, NumberEntityDescription from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE from homeassistant.const import PERCENTAGE, UnitOfTime
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@ -37,6 +37,18 @@ NUMBER_DESCRIPTION = [
native_step=1, native_step=1,
native_unit_of_measurement=PERCENTAGE, native_unit_of_measurement=PERCENTAGE,
), ),
OhmeNumberDescription(
key="preconditioning_duration",
translation_key="preconditioning_duration",
value_fn=lambda client: client.preconditioning,
set_fn=lambda client, value: client.async_set_target(
pre_condition_length=value
),
native_min_value=0,
native_max_value=60,
native_step=5,
native_unit_of_measurement=UnitOfTime.MINUTES,
),
] ]


@ -51,6 +51,9 @@
} }
}, },
"number": { "number": {
"preconditioning_duration": {
"name": "Preconditioning duration"
},
"target_percentage": { "target_percentage": {
"name": "Target percentage" "name": "Target percentage"
} }


@@ -33,7 +33,6 @@ from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.helpers.translation import async_get_translations
from homeassistant.setup import async_setup_component
-from homeassistant.util.async_ import create_eager_task

if TYPE_CHECKING:
    from . import OnboardingData, OnboardingStorage, OnboardingStoreData
@@ -235,22 +234,21 @@ class CoreConfigOnboardingView(_BaseOnboardingView):
            ):
                onboard_integrations.append("rpi_power")

-            coros: list[Coroutine[Any, Any, Any]] = [
-                hass.config_entries.flow.async_init(
-                    domain, context={"source": "onboarding"}
-                )
-                for domain in onboard_integrations
-            ]
+            for domain in onboard_integrations:
+                # Create tasks so onboarding isn't affected
+                # by errors in these integrations.
+                hass.async_create_task(
+                    hass.config_entries.flow.async_init(
+                        domain, context={"source": "onboarding"}
+                    ),
+                    f"onboarding_setup_{domain}",
+                )

            if "analytics" not in hass.config.components:
                # If by some chance that analytics has not finished
                # setting up, wait for it here so its ready for the
                # next step.
-                coros.append(async_setup_component(hass, "analytics", {}))
-
-            # Set up integrations after onboarding and ensure
-            # analytics is ready for the next step.
-            await asyncio.gather(*(create_eager_task(coro) for coro in coros))
+                await async_setup_component(hass, "analytics", {})

            return self.json({})

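The onboarding hunk above stops gathering the discovery flows and instead starts each one as its own task, so an exception in a single integration cannot abort the onboarding step. A small asyncio-only illustration of that difference (the domain names here are arbitrary examples):

import asyncio


async def start_flow(domain: str) -> None:
    if domain == "broken":
        raise RuntimeError(f"{domain} setup failed")
    print(f"{domain} flow started")


async def onboard(domains: list[str]) -> None:
    for domain in domains:
        # Fire-and-forget: a failure stays inside its task instead of
        # propagating out of onboard(), unlike awaiting a single gather().
        task = asyncio.create_task(start_flow(domain), name=f"onboarding_setup_{domain}")
        # Retrieve the exception (if any) so it is not reported as unhandled.
        task.add_done_callback(lambda t: t.cancelled() or t.exception())
    # Give the tasks a moment to run; onboarding itself does not await them.
    await asyncio.sleep(0.1)


asyncio.run(onboard(["met", "broken", "radio_browser"]))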

@ -36,7 +36,7 @@ DRIVE_STATE_ENTITIES: tuple[OneDriveSensorEntityDescription, ...] = (
key="total_size", key="total_size",
value_fn=lambda quota: quota.total, value_fn=lambda quota: quota.total,
native_unit_of_measurement=UnitOfInformation.BYTES, native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=0, suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE, device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC, entity_category=EntityCategory.DIAGNOSTIC,
@ -46,7 +46,7 @@ DRIVE_STATE_ENTITIES: tuple[OneDriveSensorEntityDescription, ...] = (
key="used_size", key="used_size",
value_fn=lambda quota: quota.used, value_fn=lambda quota: quota.used,
native_unit_of_measurement=UnitOfInformation.BYTES, native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=2, suggested_display_precision=2,
device_class=SensorDeviceClass.DATA_SIZE, device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC, entity_category=EntityCategory.DIAGNOSTIC,
@ -55,7 +55,7 @@ DRIVE_STATE_ENTITIES: tuple[OneDriveSensorEntityDescription, ...] = (
key="remaining_size", key="remaining_size",
value_fn=lambda quota: quota.remaining, value_fn=lambda quota: quota.remaining,
native_unit_of_measurement=UnitOfInformation.BYTES, native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIGABYTES, suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=2, suggested_display_precision=2,
device_class=SensorDeviceClass.DATA_SIZE, device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC, entity_category=EntityCategory.DIAGNOSTIC,
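The three quota sensors above switch their suggested display unit from decimal gigabytes to binary gibibytes, and the issue texts below follow suit. The difference, as plain arithmetic unrelated to the integration:

total_bytes = 5 * 1024**3               # a 5 GiB quota reported in bytes

print(total_bytes / 1_000_000_000)      # 5.36870912 -> about 5.37 in decimal GB
print(total_bytes / 1024**3)            # 5.0 in binary GiB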

View File

@ -32,11 +32,11 @@
"issues": { "issues": {
"drive_full": { "drive_full": {
"title": "OneDrive data cap exceeded", "title": "OneDrive data cap exceeded",
"description": "Your OneDrive has exceeded your quota limit. This means your next backup will fail. Please free up some space or upgrade your OneDrive plan. Currently using {used} GB of {total} GB." "description": "Your OneDrive has exceeded your quota limit. This means your next backup will fail. Please free up some space or upgrade your OneDrive plan. Currently using {used} GiB of {total} GiB."
}, },
"drive_almost_full": { "drive_almost_full": {
"title": "OneDrive near data cap", "title": "OneDrive near data cap",
"description": "Your OneDrive is near your quota limit. If you go over this limit your drive will be temporarily frozen and your backups will start failing. Please free up some space or upgrade your OneDrive plan. Currently using {used} GB of {total} GB." "description": "Your OneDrive is near your quota limit. If you go over this limit your drive will be temporarily frozen and your backups will start failing. Please free up some space or upgrade your OneDrive plan. Currently using {used} GiB of {total} GiB."
} }
}, },
"exceptions": { "exceptions": {

View File

@ -17,7 +17,7 @@ class ONVIFBaseEntity(Entity):
self.device: ONVIFDevice = device self.device: ONVIFDevice = device
@property @property
def available(self): def available(self) -> bool:
"""Return True if device is available.""" """Return True if device is available."""
return self.device.available return self.device.available

View File

@ -385,7 +385,7 @@
}, },
"set_central_heating_ovrd": { "set_central_heating_ovrd": {
"name": "Set central heating override", "name": "Set central heating override",
"description": "Sets the central heating override option on the gateway. When overriding the control setpoint (via a set_control_setpoint action with a value other than 0), the gateway automatically enables the central heating override to start heating. This action can then be used to control the central heating override status. To return control of the central heating to the thermostat, use the set_control_setpoint action with temperature value 0. You will only need this if you are writing your own software thermostat.", "description": "Sets the central heating override option on the gateway. When overriding the control setpoint (via a 'Set control set point' action with a value other than 0), the gateway automatically enables the central heating override to start heating. This action can then be used to control the central heating override status. To return control of the central heating to the thermostat, use the 'Set control set point' action with temperature value 0. You will only need this if you are writing your own software thermostat.",
"fields": { "fields": {
"gateway_id": { "gateway_id": {
"name": "[%key:component::opentherm_gw::services::reset_gateway::fields::gateway_id::name%]", "name": "[%key:component::opentherm_gw::services::reset_gateway::fields::gateway_id::name%]",
@ -393,7 +393,7 @@
}, },
"ch_override": { "ch_override": {
"name": "Central heating override", "name": "Central heating override",
"description": "The desired boolean value for the central heating override." "description": "Whether to enable or disable the override."
} }
} }
}, },

View File

@ -5,7 +5,7 @@
"data": { "data": {
"url": "[%key:common::config_flow::data::url%]" "url": "[%key:common::config_flow::data::url%]"
}, },
"description": "Provide URL for the Open Thread Border Router's REST API" "description": "Provide URL for the OpenThread Border Router's REST API"
} }
}, },
"error": { "error": {
@ -20,8 +20,8 @@
}, },
"issues": { "issues": {
"get_get_border_agent_id_unsupported": { "get_get_border_agent_id_unsupported": {
"title": "The OTBR does not support border agent ID", "title": "The OTBR does not support Border Agent ID",
"description": "Your OTBR does not support border agent ID.\n\nTo fix this issue, update the OTBR to the latest version and restart Home Assistant.\nTo update the OTBR, update the Open Thread Border Router or Silicon Labs Multiprotocol add-on if you use the OTBR from the add-on, otherwise update your self managed OTBR." "description": "Your OTBR does not support Border Agent ID.\n\nTo fix this issue, update the OTBR to the latest version and restart Home Assistant.\nIf you are using an OTBR integrated in Home Assistant, update either the OpenThread Border Router add-on or the Silicon Labs Multiprotocol add-on. Otherwise update your self-managed OTBR."
}, },
"insecure_thread_network": { "insecure_thread_network": {
"title": "Insecure Thread network settings detected", "title": "Insecure Thread network settings detected",

View File

@ -86,7 +86,7 @@ class PilightBaseDevice(RestoreEntity):
self._brightness = 255 self._brightness = 255
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Call when entity about to be added to hass.""" """Call when entity about to be added to hass."""
await super().async_added_to_hass() await super().async_added_to_hass()
if state := await self.async_get_last_state(): if state := await self.async_get_last_state():
@ -99,7 +99,7 @@ class PilightBaseDevice(RestoreEntity):
return self._name return self._name
@property @property
def assumed_state(self): def assumed_state(self) -> bool:
"""Return True if unable to access real state of the entity.""" """Return True if unable to access real state of the entity."""
return True return True

View File

@ -73,13 +73,13 @@ class PlaatoEntity(entity.Entity):
return None return None
@property @property
def available(self): def available(self) -> bool:
"""Return if sensor is available.""" """Return if sensor is available."""
if self._coordinator is not None: if self._coordinator is not None:
return self._coordinator.last_update_success return self._coordinator.last_update_success
return True return True
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""When entity is added to hass.""" """When entity is added to hass."""
if self._coordinator is not None: if self._coordinator is not None:
self.async_on_remove( self.async_on_remove(

View File

@ -52,7 +52,7 @@ class MinutPointEntity(Entity):
) )
await self._update_callback() await self._update_callback()
async def async_will_remove_from_hass(self): async def async_will_remove_from_hass(self) -> None:
"""Disconnect dispatcher listener when removed.""" """Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect: if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect() self._async_unsub_dispatcher_connect()
@ -61,7 +61,7 @@ class MinutPointEntity(Entity):
"""Update the value of the sensor.""" """Update the value of the sensor."""
@property @property
def available(self): def available(self) -> bool:
"""Return true if device is not offline.""" """Return true if device is not offline."""
return self._client.is_available(self.device_id) return self._client.is_available(self.device_id)

View File

@ -53,7 +53,7 @@
"connection_status": { "connection_status": {
"name": "Connection status", "name": "Connection status",
"state": { "state": {
"connected": "Conencted", "connected": "Connected",
"firewalled": "Firewalled", "firewalled": "Firewalled",
"disconnected": "Disconnected" "disconnected": "Disconnected"
} }
@ -109,16 +109,16 @@
}, },
"exceptions": { "exceptions": {
"invalid_device": { "invalid_device": {
"message": "No device with id {device_id} was found" "message": "No device with ID {device_id} was found"
}, },
"invalid_entry_id": { "invalid_entry_id": {
"message": "No entry with id {device_id} was found" "message": "No entry with ID {device_id} was found"
}, },
"login_error": { "login_error": {
"message": "A login error occured. Please check you username and password." "message": "A login error occured. Please check your username and password."
}, },
"cannot_connect": { "cannot_connect": {
"message": "Can't connect to QBittorrent, please check your configuration." "message": "Can't connect to qBittorrent, please check your configuration."
} }
} }
} }

View File

@ -35,7 +35,7 @@ class QSEntity(Entity):
"""Receive update packet from QSUSB. Match dispather_send signature.""" """Receive update packet from QSUSB. Match dispather_send signature."""
self.async_write_ha_state() self.async_write_ha_state()
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Listen for updates from QSUSb via dispatcher.""" """Listen for updates from QSUSb via dispatcher."""
self.async_on_remove( self.async_on_remove(
async_dispatcher_connect(self.hass, self.qsid, self.update_packet) async_dispatcher_connect(self.hass, self.qsid, self.update_packet)

View File

@ -45,7 +45,7 @@ class RainCloudEntity(Entity):
"""Return the name of the sensor.""" """Return the name of the sensor."""
return self._name return self._name
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Register callbacks.""" """Register callbacks."""
self.async_on_remove( self.async_on_remove(
async_dispatcher_connect( async_dispatcher_connect(

View File

@ -13,7 +13,7 @@ from regenmaschine.controller import Controller
from regenmaschine.errors import RainMachineError, UnknownAPICallError from regenmaschine.errors import RainMachineError, UnknownAPICallError
import voluptuous as vol import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ( from homeassistant.const import (
CONF_DEVICE_ID, CONF_DEVICE_ID,
CONF_IP_ADDRESS, CONF_IP_ADDRESS,
@ -465,12 +465,7 @@ async def async_unload_entry(
) -> bool: ) -> bool:
"""Unload an RainMachine config entry.""" """Unload an RainMachine config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
loaded_entries = [ if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state is ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# If this is the last loaded instance of RainMachine, deregister any services # If this is the last loaded instance of RainMachine, deregister any services
# defined during integration setup: # defined during integration setup:
for service_name in ( for service_name in (

View File

@ -94,7 +94,7 @@ SENSORS: dict[str, tuple[RefossSensorEntityDescription, ...]] = {
key="energy", key="energy",
translation_key="this_month_energy", translation_key="this_month_energy",
device_class=SensorDeviceClass.ENERGY, device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL, state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_display_precision=2, suggested_display_precision=2,
subkey="mConsume", subkey="mConsume",
@ -104,7 +104,7 @@ SENSORS: dict[str, tuple[RefossSensorEntityDescription, ...]] = {
key="energy_returned", key="energy_returned",
translation_key="this_month_energy_returned", translation_key="this_month_energy_returned",
device_class=SensorDeviceClass.ENERGY, device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL, state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_display_precision=2, suggested_display_precision=2,
subkey="mConsume", subkey="mConsume",

View File

@ -2,7 +2,7 @@
import json import json
import logging import logging
import os from pathlib import Path
from rtmapi import Rtm from rtmapi import Rtm
import voluptuous as vol import voluptuous as vol
@ -160,56 +160,64 @@ class RememberTheMilkConfiguration:
This class stores the authentication token it gets from the backend. This class stores the authentication token it gets from the backend.
""" """
def __init__(self, hass): def __init__(self, hass: HomeAssistant) -> None:
"""Create new instance of configuration.""" """Create new instance of configuration."""
self._config_file_path = hass.config.path(CONFIG_FILE_NAME) self._config_file_path = hass.config.path(CONFIG_FILE_NAME)
if not os.path.isfile(self._config_file_path): self._config = {}
self._config = {} _LOGGER.debug("Loading configuration from file: %s", self._config_file_path)
return
try: try:
_LOGGER.debug("Loading configuration from file: %s", self._config_file_path) self._config = json.loads(
with open(self._config_file_path, encoding="utf8") as config_file: Path(self._config_file_path).read_text(encoding="utf8")
self._config = json.load(config_file) )
except ValueError: except FileNotFoundError:
_LOGGER.error( _LOGGER.debug("Missing configuration file: %s", self._config_file_path)
"Failed to load configuration file, creating a new one: %s", except OSError:
_LOGGER.debug(
"Failed to read from configuration file, %s, using empty configuration",
self._config_file_path,
)
except ValueError:
_LOGGER.error(
"Failed to parse configuration file, %s, using empty configuration",
self._config_file_path, self._config_file_path,
) )
self._config = {}
def save_config(self): def _save_config(self) -> None:
"""Write the configuration to a file.""" """Write the configuration to a file."""
with open(self._config_file_path, "w", encoding="utf8") as config_file: Path(self._config_file_path).write_text(
json.dump(self._config, config_file) json.dumps(self._config), encoding="utf8"
)
def get_token(self, profile_name): def get_token(self, profile_name: str) -> str | None:
"""Get the server token for a profile.""" """Get the server token for a profile."""
if profile_name in self._config: if profile_name in self._config:
return self._config[profile_name][CONF_TOKEN] return self._config[profile_name][CONF_TOKEN]
return None return None
def set_token(self, profile_name, token): def set_token(self, profile_name: str, token: str) -> None:
"""Store a new server token for a profile.""" """Store a new server token for a profile."""
self._initialize_profile(profile_name) self._initialize_profile(profile_name)
self._config[profile_name][CONF_TOKEN] = token self._config[profile_name][CONF_TOKEN] = token
self.save_config() self._save_config()
def delete_token(self, profile_name): def delete_token(self, profile_name: str) -> None:
"""Delete a token for a profile. """Delete a token for a profile.
Usually called when the token has expired. Usually called when the token has expired.
""" """
self._config.pop(profile_name, None) self._config.pop(profile_name, None)
self.save_config() self._save_config()
def _initialize_profile(self, profile_name): def _initialize_profile(self, profile_name: str) -> None:
"""Initialize the data structures for a profile.""" """Initialize the data structures for a profile."""
if profile_name not in self._config: if profile_name not in self._config:
self._config[profile_name] = {} self._config[profile_name] = {}
if CONF_ID_MAP not in self._config[profile_name]: if CONF_ID_MAP not in self._config[profile_name]:
self._config[profile_name][CONF_ID_MAP] = {} self._config[profile_name][CONF_ID_MAP] = {}
def get_rtm_id(self, profile_name, hass_id): def get_rtm_id(
self, profile_name: str, hass_id: str
) -> tuple[str, str, str] | None:
"""Get the RTM ids for a Home Assistant task ID. """Get the RTM ids for a Home Assistant task ID.
The ID of an RTM task consists of the tuple: The ID of an RTM task consists of the tuple:
@ -221,7 +229,14 @@ class RememberTheMilkConfiguration:
return None return None
return ids[CONF_LIST_ID], ids[CONF_TIMESERIES_ID], ids[CONF_TASK_ID] return ids[CONF_LIST_ID], ids[CONF_TIMESERIES_ID], ids[CONF_TASK_ID]
def set_rtm_id(self, profile_name, hass_id, list_id, time_series_id, rtm_task_id): def set_rtm_id(
self,
profile_name: str,
hass_id: str,
list_id: str,
time_series_id: str,
rtm_task_id: str,
) -> None:
"""Add/Update the RTM task ID for a Home Assistant task IS.""" """Add/Update the RTM task ID for a Home Assistant task IS."""
self._initialize_profile(profile_name) self._initialize_profile(profile_name)
id_tuple = { id_tuple = {
@ -230,11 +245,11 @@ class RememberTheMilkConfiguration:
CONF_TASK_ID: rtm_task_id, CONF_TASK_ID: rtm_task_id,
} }
self._config[profile_name][CONF_ID_MAP][hass_id] = id_tuple self._config[profile_name][CONF_ID_MAP][hass_id] = id_tuple
self.save_config() self._save_config()
def delete_rtm_id(self, profile_name, hass_id): def delete_rtm_id(self, profile_name: str, hass_id: str) -> None:
"""Delete a key mapping.""" """Delete a key mapping."""
self._initialize_profile(profile_name) self._initialize_profile(profile_name)
if hass_id in self._config[profile_name][CONF_ID_MAP]: if hass_id in self._config[profile_name][CONF_ID_MAP]:
del self._config[profile_name][CONF_ID_MAP][hass_id] del self._config[profile_name][CONF_ID_MAP][hass_id]
self.save_config() self._save_config()
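A short usage sketch of the refactored configuration class above; the profile, token, and ID values are made up, and hass stands for a running HomeAssistant instance:

config = RememberTheMilkConfiguration(hass)

config.set_token("myprofile", "secret-token")        # persists via _save_config()
assert config.get_token("myprofile") == "secret-token"

config.set_rtm_id("myprofile", "hass-1", "list-1", "series-1", "task-1")
assert config.get_rtm_id("myprofile", "hass-1") == ("list-1", "series-1", "task-1")

config.delete_rtm_id("myprofile", "hass-1")
config.delete_token("myprofile")                     # drops the whole profile entry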

View File

@ -105,12 +105,12 @@ class RflinkDevice(Entity):
return self._state return self._state
@property @property
def assumed_state(self): def assumed_state(self) -> bool:
"""Assume device state until first device event sets state.""" """Assume device state until first device event sets state."""
return self._state is None return self._state is None
@property @property
def available(self): def available(self) -> bool:
"""Return True if entity is available.""" """Return True if entity is available."""
return self._available return self._available
@ -120,7 +120,7 @@ class RflinkDevice(Entity):
self._available = availability self._available = availability
self.async_write_ha_state() self.async_write_ha_state()
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Register update callback.""" """Register update callback."""
await super().async_added_to_hass() await super().async_added_to_hass()
# Remove temporary bogus entity_id if added # Remove temporary bogus entity_id if added
@ -300,7 +300,7 @@ class RflinkCommand(RflinkDevice):
class SwitchableRflinkDevice(RflinkCommand, RestoreEntity): class SwitchableRflinkDevice(RflinkCommand, RestoreEntity):
"""Rflink entity which can switch on/off (eg: light, switch).""" """Rflink entity which can switch on/off (eg: light, switch)."""
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Restore RFLink device state (ON/OFF).""" """Restore RFLink device state (ON/OFF)."""
await super().async_added_to_hass() await super().async_added_to_hass()
if (old_state := await self.async_get_last_state()) is not None: if (old_state := await self.async_get_last_state()) is not None:

View File

@ -80,7 +80,7 @@ class IRobotEntity(Entity):
return None return None
return dt_util.utc_from_timestamp(ts) return dt_util.utc_from_timestamp(ts)
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Register callback function.""" """Register callback function."""
self.vacuum.register_on_message_callback(self.on_message) self.vacuum.register_on_message_callback(self.on_message)

View File

@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/sense", "documentation": "https://www.home-assistant.io/integrations/sense",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["sense_energy"], "loggers": ["sense_energy"],
"requirements": ["sense-energy==0.13.4"] "requirements": ["sense-energy==0.13.5"]
} }

View File

@ -429,16 +429,16 @@
} }
}, },
"enable_pure_boost": { "enable_pure_boost": {
"name": "Enable pure boost", "name": "Enable Pure Boost",
"description": "Enables and configures Pure Boost settings.", "description": "Enables and configures Pure Boost settings.",
"fields": { "fields": {
"ac_integration": { "ac_integration": {
"name": "AC integration", "name": "AC integration",
"description": "Integrate with Air Conditioner." "description": "Integrate with air conditioner."
}, },
"geo_integration": { "geo_integration": {
"name": "Geo integration", "name": "Geo integration",
"description": "Integrate with Presence." "description": "Integrate with presence."
}, },
"indoor_integration": { "indoor_integration": {
"name": "Indoor air quality", "name": "Indoor air quality",
@ -468,7 +468,7 @@
}, },
"fan_mode": { "fan_mode": {
"name": "Fan mode", "name": "Fan mode",
"description": "set fan mode." "description": "Set fan mode."
}, },
"swing_mode": { "swing_mode": {
"name": "Swing mode", "name": "Swing mode",

View File

@ -19,7 +19,7 @@
"delivered": { "delivered": {
"default": "mdi:package" "default": "mdi:package"
}, },
"returned": { "alert": {
"default": "mdi:package" "default": "mdi:package"
}, },
"package": { "package": {

View File

@ -11,7 +11,7 @@ get_packages:
- "ready_to_be_picked_up" - "ready_to_be_picked_up"
- "undelivered" - "undelivered"
- "delivered" - "delivered"
- "returned" - "alert"
translation_key: package_state translation_key: package_state
config_entry_id: config_entry_id:
required: true required: true

View File

@ -57,8 +57,8 @@
"delivered": { "delivered": {
"name": "Delivered" "name": "Delivered"
}, },
"returned": { "alert": {
"name": "Returned" "name": "Alert"
}, },
"package": { "package": {
"name": "Package {name}" "name": "Package {name}"
@ -68,7 +68,7 @@
"services": { "services": {
"get_packages": { "get_packages": {
"name": "Get packages", "name": "Get packages",
"description": "Get packages from 17Track", "description": "Queries the 17track API for the latest package data.",
"fields": { "fields": {
"package_state": { "package_state": {
"name": "Package states", "name": "Package states",
@ -82,7 +82,7 @@
}, },
"archive_package": { "archive_package": {
"name": "Archive package", "name": "Archive package",
"description": "Archive a package", "description": "Archives a package using the 17track API.",
"fields": { "fields": {
"package_tracking_number": { "package_tracking_number": {
"name": "Package tracking number", "name": "Package tracking number",
@ -104,7 +104,7 @@
"ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]", "ready_to_be_picked_up": "[%key:component::seventeentrack::entity::sensor::ready_to_be_picked_up::name%]",
"undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]", "undelivered": "[%key:component::seventeentrack::entity::sensor::undelivered::name%]",
"delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]", "delivered": "[%key:component::seventeentrack::entity::sensor::delivered::name%]",
"returned": "[%key:component::seventeentrack::entity::sensor::returned::name%]" "alert": "[%key:component::seventeentrack::entity::sensor::alert::name%]"
} }
} }
} }

View File

@ -39,7 +39,7 @@ from simplipy.websocket import (
) )
import voluptuous as vol import voluptuous as vol
from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ( from homeassistant.const import (
ATTR_CODE, ATTR_CODE,
ATTR_DEVICE_ID, ATTR_DEVICE_ID,
@ -402,12 +402,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
if unload_ok: if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id) hass.data[DOMAIN].pop(entry.entry_id)
loaded_entries = [ if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# If this is the last loaded instance of SimpliSafe, deregister any services # If this is the last loaded instance of SimpliSafe, deregister any services
# defined during integration setup: # defined during integration setup:
for service_name in SERVICES: for service_name in SERVICES:

View File

@ -7,5 +7,5 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["pymodbus", "pysmarty2"], "loggers": ["pymodbus", "pysmarty2"],
"requirements": ["pysmarty2==0.10.1"] "requirements": ["pysmarty2==0.10.2"]
} }

View File

@ -2,7 +2,7 @@
"config": { "config": {
"step": { "step": {
"user": { "user": {
"description": "Set up SMLIGHT Zigbee Integration", "description": "Set up SMLIGHT Zigbee integration",
"data": { "data": {
"host": "[%key:common::config_flow::data::host%]" "host": "[%key:common::config_flow::data::host%]"
}, },
@ -111,7 +111,7 @@
"name": "Zigbee flash mode" "name": "Zigbee flash mode"
}, },
"reconnect_zigbee_router": { "reconnect_zigbee_router": {
"name": "Reconnect zigbee router" "name": "Reconnect Zigbee router"
} }
}, },
"switch": { "switch": {

View File

@ -71,7 +71,7 @@ class SomaEntity(Entity):
self.api_is_available = True self.api_is_available = True
@property @property
def available(self): def available(self) -> bool:
"""Return true if the last API commands returned successfully.""" """Return true if the last API commands returned successfully."""
return self.is_available return self.is_available

View File

@ -129,10 +129,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
server_coordinator = LMSStatusDataUpdateCoordinator(hass, entry, lms) server_coordinator = LMSStatusDataUpdateCoordinator(hass, entry, lms)
entry.runtime_data = SqueezeboxData( entry.runtime_data = SqueezeboxData(coordinator=server_coordinator, server=lms)
coordinator=server_coordinator,
server=lms,
)
# set up player discovery # set up player discovery
known_servers = hass.data.setdefault(DOMAIN, {}).setdefault(KNOWN_SERVERS, {}) known_servers = hass.data.setdefault(DOMAIN, {}).setdefault(KNOWN_SERVERS, {})

View File

@ -81,11 +81,12 @@ CONTENT_TYPE_TO_CHILD_TYPE = {
"New Music": MediaType.ALBUM, "New Music": MediaType.ALBUM,
} }
BROWSE_LIMIT = 1000
async def build_item_response( async def build_item_response(
entity: MediaPlayerEntity, player: Player, payload: dict[str, str | None] entity: MediaPlayerEntity,
player: Player,
payload: dict[str, str | None],
browse_limit: int,
) -> BrowseMedia: ) -> BrowseMedia:
"""Create response payload for search described by payload.""" """Create response payload for search described by payload."""
@ -107,7 +108,7 @@ async def build_item_response(
result = await player.async_browse( result = await player.async_browse(
MEDIA_TYPE_TO_SQUEEZEBOX[search_type], MEDIA_TYPE_TO_SQUEEZEBOX[search_type],
limit=BROWSE_LIMIT, limit=browse_limit,
browse_id=browse_id, browse_id=browse_id,
) )
@ -237,7 +238,11 @@ def media_source_content_filter(item: BrowseMedia) -> bool:
return item.media_content_type.startswith("audio/") return item.media_content_type.startswith("audio/")
async def generate_playlist(player: Player, payload: dict[str, str]) -> list | None: async def generate_playlist(
player: Player,
payload: dict[str, str],
browse_limit: int,
) -> list | None:
"""Generate playlist from browsing payload.""" """Generate playlist from browsing payload."""
media_type = payload["search_type"] media_type = payload["search_type"]
media_id = payload["search_id"] media_id = payload["search_id"]
@ -247,7 +252,7 @@ async def generate_playlist(player: Player, payload: dict[str, str]) -> list | N
browse_id = (SQUEEZEBOX_ID_BY_TYPE[media_type], media_id) browse_id = (SQUEEZEBOX_ID_BY_TYPE[media_type], media_id)
result = await player.async_browse( result = await player.async_browse(
"titles", limit=BROWSE_LIMIT, browse_id=browse_id "titles", limit=browse_limit, browse_id=browse_id
) )
if result and "items" in result: if result and "items" in result:
items: list = result["items"] items: list = result["items"]

View File

@ -11,15 +11,34 @@ from pysqueezebox import Server, async_discover
import voluptuous as vol import voluptuous as vol
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.data_entry_flow import AbortFlow from homeassistant.data_entry_flow import AbortFlow
from homeassistant.helpers import entity_registry as er from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
NumberSelector,
NumberSelectorConfig,
NumberSelectorMode,
)
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from .const import CONF_HTTPS, DEFAULT_PORT, DOMAIN from .const import (
CONF_BROWSE_LIMIT,
CONF_HTTPS,
CONF_VOLUME_STEP,
DEFAULT_BROWSE_LIMIT,
DEFAULT_PORT,
DEFAULT_VOLUME_STEP,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -77,6 +96,12 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN):
self.data_schema = _base_schema() self.data_schema = _base_schema()
self.discovery_info: dict[str, Any] | None = None self.discovery_info: dict[str, Any] | None = None
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler:
"""Get the options flow for this handler."""
return OptionsFlowHandler()
async def _discover(self, uuid: str | None = None) -> None: async def _discover(self, uuid: str | None = None) -> None:
"""Discover an unconfigured LMS server.""" """Discover an unconfigured LMS server."""
self.discovery_info = None self.discovery_info = None
@ -222,3 +247,48 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN):
# if the player is unknown, then we likely need to configure its server # if the player is unknown, then we likely need to configure its server
return await self.async_step_user() return await self.async_step_user()
OPTIONS_SCHEMA = vol.Schema(
{
vol.Required(CONF_BROWSE_LIMIT): vol.All(
NumberSelector(
NumberSelectorConfig(min=1, max=65534, mode=NumberSelectorMode.BOX)
),
vol.Coerce(int),
),
vol.Required(CONF_VOLUME_STEP): vol.All(
NumberSelector(
NumberSelectorConfig(min=1, max=20, mode=NumberSelectorMode.SLIDER)
),
vol.Coerce(int),
),
}
)
class OptionsFlowHandler(OptionsFlow):
"""Options Flow Handler."""
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Options Flow Steps."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self.add_suggested_values_to_schema(
OPTIONS_SCHEMA,
{
CONF_BROWSE_LIMIT: self.config_entry.options.get(
CONF_BROWSE_LIMIT, DEFAULT_BROWSE_LIMIT
),
CONF_VOLUME_STEP: self.config_entry.options.get(
CONF_VOLUME_STEP, DEFAULT_VOLUME_STEP
),
},
),
)
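One note on OPTIONS_SCHEMA above: the number selectors hand back floats, which is why each field is wrapped in vol.All(..., vol.Coerce(int)) before being stored. Roughly (values arbitrary):

user_input = {CONF_BROWSE_LIMIT: 1000.0, CONF_VOLUME_STEP: 5.0}   # as returned by the form
assert OPTIONS_SCHEMA(user_input) == {CONF_BROWSE_LIMIT: 1000, CONF_VOLUME_STEP: 5}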

View File

@ -32,3 +32,7 @@ SIGNAL_PLAYER_DISCOVERED = "squeezebox_player_discovered"
SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered"
DISCOVERY_INTERVAL = 60 DISCOVERY_INTERVAL = 60
PLAYER_UPDATE_INTERVAL = 5 PLAYER_UPDATE_INTERVAL = 5
CONF_BROWSE_LIMIT = "browse_limit"
CONF_VOLUME_STEP = "volume_step"
DEFAULT_BROWSE_LIMIT = 1000
DEFAULT_VOLUME_STEP = 5

View File

@ -52,6 +52,10 @@ from .browse_media import (
media_source_content_filter, media_source_content_filter,
) )
from .const import ( from .const import (
CONF_BROWSE_LIMIT,
CONF_VOLUME_STEP,
DEFAULT_BROWSE_LIMIT,
DEFAULT_VOLUME_STEP,
DISCOVERY_TASK, DISCOVERY_TASK,
DOMAIN, DOMAIN,
KNOWN_PLAYERS, KNOWN_PLAYERS,
@ -166,6 +170,7 @@ class SqueezeBoxMediaPlayerEntity(
| MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PAUSE
| MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE | MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.VOLUME_STEP
| MediaPlayerEntityFeature.PREVIOUS_TRACK | MediaPlayerEntityFeature.PREVIOUS_TRACK
| MediaPlayerEntityFeature.NEXT_TRACK | MediaPlayerEntityFeature.NEXT_TRACK
| MediaPlayerEntityFeature.SEEK | MediaPlayerEntityFeature.SEEK
@ -184,10 +189,7 @@ class SqueezeBoxMediaPlayerEntity(
_attr_name = None _attr_name = None
_last_update: datetime | None = None _last_update: datetime | None = None
def __init__( def __init__(self, coordinator: SqueezeBoxPlayerUpdateCoordinator) -> None:
self,
coordinator: SqueezeBoxPlayerUpdateCoordinator,
) -> None:
"""Initialize the SqueezeBox device.""" """Initialize the SqueezeBox device."""
super().__init__(coordinator) super().__init__(coordinator)
player = coordinator.player player = coordinator.player
@ -223,6 +225,23 @@ class SqueezeBoxMediaPlayerEntity(
self._last_update = utcnow() self._last_update = utcnow()
self.async_write_ha_state() self.async_write_ha_state()
@property
def volume_step(self) -> float:
"""Return the step to be used for volume up down."""
return float(
self.coordinator.config_entry.options.get(
CONF_VOLUME_STEP, DEFAULT_VOLUME_STEP
)
/ 100
)
@property
def browse_limit(self) -> int:
"""Return the step to be used for volume up down."""
return self.coordinator.config_entry.options.get(
CONF_BROWSE_LIMIT, DEFAULT_BROWSE_LIMIT
)
@property @property
def available(self) -> bool: def available(self) -> bool:
"""Return True if entity is available.""" """Return True if entity is available."""
@ -366,16 +385,6 @@ class SqueezeBoxMediaPlayerEntity(
await self._player.async_set_power(False) await self._player.async_set_power(False)
await self.coordinator.async_refresh() await self.coordinator.async_refresh()
async def async_volume_up(self) -> None:
"""Volume up media player."""
await self._player.async_set_volume("+5")
await self.coordinator.async_refresh()
async def async_volume_down(self) -> None:
"""Volume down media player."""
await self._player.async_set_volume("-5")
await self.coordinator.async_refresh()
async def async_set_volume_level(self, volume: float) -> None: async def async_set_volume_level(self, volume: float) -> None:
"""Set volume level, range 0..1.""" """Set volume level, range 0..1."""
volume_percent = str(int(volume * 100)) volume_percent = str(int(volume * 100))
@ -466,7 +475,11 @@ class SqueezeBoxMediaPlayerEntity(
"search_id": media_id, "search_id": media_id,
"search_type": MediaType.PLAYLIST, "search_type": MediaType.PLAYLIST,
} }
playlist = await generate_playlist(self._player, payload) playlist = await generate_playlist(
self._player,
payload,
self.browse_limit,
)
except BrowseError: except BrowseError:
# a list of urls # a list of urls
content = json.loads(media_id) content = json.loads(media_id)
@ -477,7 +490,11 @@ class SqueezeBoxMediaPlayerEntity(
"search_id": media_id, "search_id": media_id,
"search_type": media_type, "search_type": media_type,
} }
playlist = await generate_playlist(self._player, payload) playlist = await generate_playlist(
self._player,
payload,
self.browse_limit,
)
_LOGGER.debug("Generated playlist: %s", playlist) _LOGGER.debug("Generated playlist: %s", playlist)
@ -587,7 +604,12 @@ class SqueezeBoxMediaPlayerEntity(
"search_id": media_content_id, "search_id": media_content_id,
} }
return await build_item_response(self, self._player, payload) return await build_item_response(
self,
self._player,
payload,
self.browse_limit,
)
async def async_get_browse_image( async def async_get_browse_image(
self, self,
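The new volume_step property above converts the stored option (a whole-number percentage from 1 to 20) into the 0..1 fraction the media player API expects, letting the removed async_volume_up/async_volume_down overrides fall back to the default step behaviour. For example:

options = {"volume_step": 5}                 # as saved by the options flow (default)
step = float(options.get("volume_step", 5) / 100)
assert step == 0.05                          # volume up/down now move by 5%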

View File

@ -103,5 +103,20 @@
"unit_of_measurement": "[%key:component::squeezebox::entity::sensor::player_count::unit_of_measurement%]" "unit_of_measurement": "[%key:component::squeezebox::entity::sensor::player_count::unit_of_measurement%]"
} }
} }
},
"options": {
"step": {
"init": {
"title": "LMS Configuration",
"data": {
"browse_limit": "Browse limit",
"volume_step": "Volume step"
},
"data_description": {
"browse_limit": "Maximum number of items when browsing or in a playlist.",
"volume_step": "Amount to adjust the volume when turning volume up or down."
}
}
}
} }
} }

View File

@ -27,20 +27,20 @@ class StarlineEntity(Entity):
self._unsubscribe_api: Callable | None = None self._unsubscribe_api: Callable | None = None
@property @property
def available(self): def available(self) -> bool:
"""Return True if entity is available.""" """Return True if entity is available."""
return self._account.api.available return self._account.api.available
def update(self): def update(self) -> None:
"""Read new state data.""" """Read new state data."""
self.schedule_update_ha_state() self.schedule_update_ha_state()
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Call when entity about to be added to Home Assistant.""" """Call when entity about to be added to Home Assistant."""
await super().async_added_to_hass() await super().async_added_to_hass()
self._unsubscribe_api = self._account.api.add_update_listener(self.update) self._unsubscribe_api = self._account.api.add_update_listener(self.update)
async def async_will_remove_from_hass(self): async def async_will_remove_from_hass(self) -> None:
"""Call when entity is being removed from Home Assistant.""" """Call when entity is being removed from Home Assistant."""
await super().async_will_remove_from_hass() await super().async_will_remove_from_hass()
if self._unsubscribe_api is not None: if self._unsubscribe_api is not None:

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/stookwijzer", "documentation": "https://www.home-assistant.io/integrations/stookwijzer",
"integration_type": "service", "integration_type": "service",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"requirements": ["stookwijzer==1.5.2"] "requirements": ["stookwijzer==1.5.4"]
} }

View File

@ -61,7 +61,7 @@ class SwitchbotEntity(
return self.coordinator.device.parsed_data return self.coordinator.device.parsed_data
@property @property
def extra_state_attributes(self) -> Mapping[Any, Any]: def extra_state_attributes(self) -> Mapping[str, Any]:
"""Return the state attributes.""" """Return the state attributes."""
return {"last_run_success": self._last_run_success} return {"last_run_success": self._last_run_success}

View File

@ -14,6 +14,7 @@ from homeassistant.components.backup import (
AgentBackup, AgentBackup,
BackupAgent, BackupAgent,
BackupAgentError, BackupAgentError,
BackupNotFound,
suggested_filename, suggested_filename,
) )
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
@ -101,6 +102,7 @@ class SynologyDSMBackupAgent(BackupAgent):
) )
syno_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] syno_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id]
self.api = syno_data.api self.api = syno_data.api
self.backup_base_names: dict[str, str] = {}
@property @property
def _file_station(self) -> SynoFileStation: def _file_station(self) -> SynoFileStation:
@ -109,18 +111,19 @@ class SynologyDSMBackupAgent(BackupAgent):
assert self.api.file_station assert self.api.file_station
return self.api.file_station return self.api.file_station
async def _async_suggested_filenames( async def _async_backup_filenames(
self, self,
backup_id: str, backup_id: str,
) -> tuple[str, str]: ) -> tuple[str, str]:
"""Suggest filenames for the backup. """Return the actual backup filenames.
:param backup_id: The ID of the backup that was returned in async_list_backups. :param backup_id: The ID of the backup that was returned in async_list_backups.
:return: A tuple of tar_filename and meta_filename :return: A tuple of tar_filename and meta_filename
""" """
if (backup := await self.async_get_backup(backup_id)) is None: if await self.async_get_backup(backup_id) is None:
raise BackupAgentError("Backup not found") raise BackupNotFound
return suggested_filenames(backup) base_name = self.backup_base_names[backup_id]
return (f"{base_name}.tar", f"{base_name}_meta.json")
async def async_download_backup( async def async_download_backup(
self, self,
@ -132,7 +135,7 @@ class SynologyDSMBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups. :param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes. :return: An async iterator that yields bytes.
""" """
(filename_tar, _) = await self._async_suggested_filenames(backup_id) (filename_tar, _) = await self._async_backup_filenames(backup_id)
try: try:
resp = await self._file_station.download_file( resp = await self._file_station.download_file(
@ -193,7 +196,7 @@ class SynologyDSMBackupAgent(BackupAgent):
:param backup_id: The ID of the backup that was returned in async_list_backups. :param backup_id: The ID of the backup that was returned in async_list_backups.
""" """
try: try:
(filename_tar, filename_meta) = await self._async_suggested_filenames( (filename_tar, filename_meta) = await self._async_backup_filenames(
backup_id backup_id
) )
except BackupAgentError: except BackupAgentError:
@ -247,6 +250,7 @@ class SynologyDSMBackupAgent(BackupAgent):
assert files assert files
backups: dict[str, AgentBackup] = {} backups: dict[str, AgentBackup] = {}
backup_base_names: dict[str, str] = {}
for file in files: for file in files:
if file.name.endswith("_meta.json"): if file.name.endswith("_meta.json"):
try: try:
@ -255,7 +259,10 @@ class SynologyDSMBackupAgent(BackupAgent):
LOGGER.error("Failed to download meta data: %s", err) LOGGER.error("Failed to download meta data: %s", err)
continue continue
agent_backup = AgentBackup.from_dict(meta_data) agent_backup = AgentBackup.from_dict(meta_data)
backups[agent_backup.backup_id] = agent_backup backup_id = agent_backup.backup_id
backups[backup_id] = agent_backup
backup_base_names[backup_id] = file.name.replace("_meta.json", "")
self.backup_base_names = backup_base_names
return backups return backups
async def async_get_backup( async def async_get_backup(
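The new backup_base_names cache pairs each backup ID with the base file name actually found on the share, so downloads and deletions reuse the real names instead of re-deriving suggested ones. The mapping logic, shown with a made-up listing entry:

file_name = "automatic_backup_2025.2.1_meta.json"    # hypothetical file on the share

base_name = file_name.replace("_meta.json", "")
tar_name, meta_name = f"{base_name}.tar", f"{base_name}_meta.json"

assert tar_name == "automatic_backup_2025.2.1.tar"
assert meta_name == file_name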

View File

@ -35,13 +35,17 @@ from homeassistant.const import (
) )
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import ( from .const import (
CONF_BACKUP_PATH,
CONF_DEVICE_TOKEN, CONF_DEVICE_TOKEN,
DEFAULT_TIMEOUT, DEFAULT_TIMEOUT,
DOMAIN,
EXCEPTION_DETAILS, EXCEPTION_DETAILS,
EXCEPTION_UNKNOWN, EXCEPTION_UNKNOWN,
ISSUE_MISSING_BACKUP_SETUP,
SYNOLOGY_CONNECTION_EXCEPTIONS, SYNOLOGY_CONNECTION_EXCEPTIONS,
) )
@ -174,6 +178,19 @@ class SynoApi:
" permissions or no writable shared folders available" " permissions or no writable shared folders available"
) )
if shares and not self._entry.options.get(CONF_BACKUP_PATH):
ir.async_create_issue(
self._hass,
DOMAIN,
f"{ISSUE_MISSING_BACKUP_SETUP}_{self._entry.unique_id}",
data={"entry_id": self._entry.entry_id},
is_fixable=True,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key=ISSUE_MISSING_BACKUP_SETUP,
translation_placeholders={"title": self._entry.title},
)
LOGGER.debug( LOGGER.debug(
"State of File Station during setup of '%s': %s", "State of File Station during setup of '%s': %s",
self._entry.unique_id, self._entry.unique_id,

View File

@ -35,6 +35,8 @@ PLATFORMS = [
EXCEPTION_DETAILS = "details" EXCEPTION_DETAILS = "details"
EXCEPTION_UNKNOWN = "unknown" EXCEPTION_UNKNOWN = "unknown"
ISSUE_MISSING_BACKUP_SETUP = "missing_backup_setup"
# Configuration # Configuration
CONF_SERIAL = "serial" CONF_SERIAL = "serial"
CONF_VOLUMES = "volumes" CONF_VOLUMES = "volumes"

View File

@ -0,0 +1,125 @@
"""Repair flows for the Synology DSM integration."""
from __future__ import annotations
from contextlib import suppress
import logging
from typing import cast
from synology_dsm.api.file_station.models import SynoFileSharedFolder
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import (
CONF_BACKUP_PATH,
CONF_BACKUP_SHARE,
DOMAIN,
ISSUE_MISSING_BACKUP_SETUP,
SYNOLOGY_CONNECTION_EXCEPTIONS,
)
from .models import SynologyDSMData
LOGGER = logging.getLogger(__name__)
class MissingBackupSetupRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow."""
def __init__(self, entry: ConfigEntry, issue_id: str) -> None:
"""Create flow."""
self.entry = entry
self.issue_id = issue_id
super().__init__()
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the first step of a fix flow."""
return self.async_show_menu(
menu_options=["confirm", "ignore"],
description_placeholders={
"docs_url": "https://www.home-assistant.io/integrations/synology_dsm/#backup-location"
},
)
async def async_step_confirm(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.entry.unique_id]
if user_input is not None:
self.hass.config_entries.async_update_entry(
self.entry, options={**dict(self.entry.options), **user_input}
)
return self.async_create_entry(data={})
shares: list[SynoFileSharedFolder] | None = None
if syno_data.api.file_station:
with suppress(*SYNOLOGY_CONNECTION_EXCEPTIONS):
shares = await syno_data.api.file_station.get_shared_folders(
only_writable=True
)
if not shares:
return self.async_abort(reason="no_shares")
return self.async_show_form(
data_schema=vol.Schema(
{
vol.Required(
CONF_BACKUP_SHARE,
default=self.entry.options[CONF_BACKUP_SHARE],
): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(value=s.path, label=s.name)
for s in shares
],
mode=SelectSelectorMode.DROPDOWN,
),
),
vol.Required(
CONF_BACKUP_PATH,
default=self.entry.options[CONF_BACKUP_PATH],
): str,
}
),
)
async def async_step_ignore(
self, _: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
ir.async_ignore_issue(self.hass, DOMAIN, self.issue_id, True)
return self.async_abort(reason="ignored")
async def async_create_fix_flow(
hass: HomeAssistant,
issue_id: str,
data: dict[str, str | int | float | None] | None,
) -> RepairsFlow:
"""Create flow."""
entry = None
if data and (entry_id := data.get("entry_id")):
entry_id = cast(str, entry_id)
entry = hass.config_entries.async_get_entry(entry_id)
if entry and issue_id.startswith(ISSUE_MISSING_BACKUP_SETUP):
return MissingBackupSetupRepairFlow(entry, issue_id)
return ConfirmRepairFlow()

View File

@ -185,6 +185,37 @@
} }
} }
}, },
"issues": {
"missing_backup_setup": {
"title": "Backup location not configured for {title}",
"fix_flow": {
"step": {
"init": {
"description": "The backup location for {title} is not configured. Do you want to set it up now? Details can be found in the integration documentation under [Backup Location]({docs_url})",
"menu_options": {
"confirm": "Set up the backup location now",
"ignore": "Don't set it up now"
}
},
"confirm": {
"title": "[%key:component::synology_dsm::config::step::backup_share::title%]",
"data": {
"backup_share": "[%key:component::synology_dsm::config::step::backup_share::data::backup_share%]",
"backup_path": "[%key:component::synology_dsm::config::step::backup_share::data::backup_path%]"
},
"data_description": {
"backup_share": "[%key:component::synology_dsm::config::step::backup_share::data_description::backup_share%]",
"backup_path": "[%key:component::synology_dsm::config::step::backup_share::data_description::backup_path%]"
}
}
},
"abort": {
"no_shares": "There are no shared folders available for the user.\nPlease check the documentation.",
"ignored": "The backup location has not been configured.\nYou can still set it up later via the integration options."
}
}
}
},
"services": { "services": {
"reboot": { "reboot": {
"name": "Reboot", "name": "Reboot",

View File

@ -33,7 +33,7 @@ class TelldusLiveEntity(Entity):
self._id = device_id self._id = device_id
self._client = client self._client = client
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Call when entity is added to hass.""" """Call when entity is added to hass."""
_LOGGER.debug("Created device %s", self) _LOGGER.debug("Created device %s", self)
self.async_on_remove( self.async_on_remove(
@ -58,12 +58,12 @@ class TelldusLiveEntity(Entity):
return self.device.state return self.device.state
@property @property
def assumed_state(self): def assumed_state(self) -> bool:
"""Return true if unable to access real state of entity.""" """Return true if unable to access real state of entity."""
return True return True
@property @property
def available(self): def available(self) -> bool:
"""Return true if device is not offline.""" """Return true if device is not offline."""
return self._client.is_available(self.device_id) return self._client.is_available(self.device_id)

View File

@ -40,7 +40,7 @@ class TellstickDevice(Entity):
self._attr_name = tellcore_device.name self._attr_name = tellcore_device.name
self._attr_unique_id = tellcore_device.id self._attr_unique_id = tellcore_device.id
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Register callbacks.""" """Register callbacks."""
self.async_on_remove( self.async_on_remove(
async_dispatcher_connect( async_dispatcher_connect(
@ -146,6 +146,6 @@ class TellstickDevice(Entity):
except TelldusError as err: except TelldusError as err:
_LOGGER.error(err) _LOGGER.error(err)
def update(self): def update(self) -> None:
"""Poll the current state of the device.""" """Poll the current state of the device."""
self._update_from_tellcore() self._update_from_tellcore()

View File

@ -712,7 +712,7 @@
"name": "Navigate to coordinates" "name": "Navigate to coordinates"
}, },
"set_scheduled_charging": { "set_scheduled_charging": {
"description": "Sets a time at which charging should be completed.", "description": "Sets a time at which charging should be started.",
"fields": { "fields": {
"device_id": { "device_id": {
"description": "Vehicle to schedule.", "description": "Vehicle to schedule.",

View File

@ -11,7 +11,7 @@ from tplink_omada_client.exceptions import (
UnsupportedControllerVersion, UnsupportedControllerVersion,
) )
from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
@ -80,12 +80,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> boo
async def async_unload_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
loaded_entries = [ if not hass.config_entries.async_loaded_entries(DOMAIN):
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.state == ConfigEntryState.LOADED
]
if len(loaded_entries) == 1:
# This is the last loaded instance of Omada, deregister any services # This is the last loaded instance of Omada, deregister any services
hass.services.async_remove(DOMAIN, "reconnect_client") hass.services.async_remove(DOMAIN, "reconnect_client")
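RainMachine, SimpliSafe, and this Omada hunk all swap a manual scan over async_entries for the async_loaded_entries helper. A minimal sketch of the resulting unload pattern; DOMAIN, PLATFORMS, and the service tuple are placeholders, not any one integration's actual values:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

DOMAIN = "example_domain"                 # placeholder
PLATFORMS: list = []                      # placeholder
SERVICES = ("example_service",)           # placeholder service names


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry, removing domain services with the last instance."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if not hass.config_entries.async_loaded_entries(DOMAIN):
        # No loaded entries remain for this domain, so the domain-wide
        # services are no longer needed.
        for service_name in SERVICES:
            hass.services.async_remove(DOMAIN, service_name)
    return unload_ok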

View File

@ -30,7 +30,7 @@ class UpbEntity(Entity):
return self._element.as_dict() return self._element.as_dict()
@property @property
def available(self): def available(self) -> bool:
"""Is the entity available to be updated.""" """Is the entity available to be updated."""
return self._upb.is_connected() return self._upb.is_connected()
@ -43,7 +43,7 @@ class UpbEntity(Entity):
self._element_changed(element, changeset) self._element_changed(element, changeset)
self.async_write_ha_state() self.async_write_ha_state()
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Register callback for UPB changes and update entity state.""" """Register callback for UPB changes and update entity state."""
self._element.add_callback(self._element_callback) self._element.add_callback(self._element_callback)
self._element_callback(self._element, {}) self._element_callback(self._element, {})

View File

@ -31,6 +31,6 @@ class VeluxEntity(Entity):
self.node.register_device_updated_cb(after_update_callback) self.node.register_device_updated_cb(after_update_callback)
async def async_added_to_hass(self): async def async_added_to_hass(self) -> None:
"""Store register state change callback.""" """Store register state change callback."""
self.async_register_callbacks() self.async_register_callbacks()

View File

@ -52,7 +52,7 @@ class VeraEntity[_DeviceTypeT: veraApi.VeraDevice](Entity):
"""Update the state.""" """Update the state."""
self.schedule_update_ha_state(True) self.schedule_update_ha_state(True)
def update(self): def update(self) -> None:
"""Force a refresh from the device if the device is unavailable.""" """Force a refresh from the device if the device is unavailable."""
refresh_needed = self.vera_device.should_poll or not self.available refresh_needed = self.vera_device.should_poll or not self.available
_LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed) _LOGGER.debug("%s: update called (refresh=%s)", self._name, refresh_needed)
@ -90,7 +90,7 @@ class VeraEntity[_DeviceTypeT: veraApi.VeraDevice](Entity):
return attr return attr
@property @property
def available(self): def available(self) -> bool:
"""If device communications have failed return false.""" """If device communications have failed return false."""
return not self.vera_device.comm_failure return not self.vera_device.comm_failure

View File

@ -63,6 +63,7 @@ SKU_TO_BASE_DEVICE = {
# Air Purifiers # Air Purifiers
"LV-PUR131S": "LV-PUR131S", "LV-PUR131S": "LV-PUR131S",
"LV-RH131S": "LV-PUR131S", # Alt ID Model LV-PUR131S "LV-RH131S": "LV-PUR131S", # Alt ID Model LV-PUR131S
"LV-RH131S-WM": "LV-PUR131S", # Alt ID Model LV-PUR131S
"Core200S": "Core200S", "Core200S": "Core200S",
"LAP-C201S-AUSR": "Core200S", # Alt ID Model Core200S "LAP-C201S-AUSR": "Core200S", # Alt ID Model Core200S
"LAP-C202S-WUSR": "Core200S", # Alt ID Model Core200S "LAP-C202S-WUSR": "Core200S", # Alt ID Model Core200S

View File

@ -12,5 +12,5 @@
"documentation": "https://www.home-assistant.io/integrations/vesync", "documentation": "https://www.home-assistant.io/integrations/vesync",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["pyvesync"], "loggers": ["pyvesync"],
"requirements": ["pyvesync==2.1.17"] "requirements": ["pyvesync==2.1.18"]
} }

View File

@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/vicare", "documentation": "https://www.home-assistant.io/integrations/vicare",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["PyViCare"], "loggers": ["PyViCare"],
"requirements": ["PyViCare==2.43.0"] "requirements": ["PyViCare==2.43.1"]
} }

View File

@ -57,7 +57,7 @@ class VolvoEntity(CoordinatorEntity[VolvoUpdateCoordinator]):
return f"{self._vehicle_name} {self._entity_name}" return f"{self._vehicle_name} {self._entity_name}"
@property @property
def assumed_state(self): def assumed_state(self) -> bool:
"""Return true if unable to access real state of entity.""" """Return true if unable to access real state of entity."""
return True return True

Some files were not shown because too many files have changed in this diff