mirror of https://github.com/home-assistant/core.git, synced 2025-04-24 01:08:12 +00:00

Merge remote-tracking branch 'upstream/dev' into esphome_bronze

This commit is contained in: commit 0794bfbdcf

CODEOWNERS (generated, 2 changed lines)
@@ -432,7 +432,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50
/homeassistant/components/ephember/ @ttroy50 @roberty99
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epion/ @lhgravendeel
@@ -120,6 +120,7 @@ class AppleTvMediaPlayer(
"""Initialize the Apple TV media player."""
super().__init__(name, identifier, manager)
self._playing: Playing | None = None
self._playing_last_updated: datetime | None = None
self._app_list: dict[str, str] = {}

@callback
@@ -209,6 +210,7 @@ class AppleTvMediaPlayer(
This is a callback function from pyatv.interface.PushListener.
"""
self._playing = playstatus
self._playing_last_updated = dt_util.utcnow()
self.async_write_ha_state()

@callback
@@ -316,7 +318,7 @@ class AppleTvMediaPlayer(
def media_position_updated_at(self) -> datetime | None:
"""Last valid time of media position."""
if self.state in {MediaPlayerState.PLAYING, MediaPlayerState.PAUSED}:
return dt_util.utcnow()
return self._playing_last_updated
return None

async def async_play_media(
@@ -21,6 +21,6 @@
"bluetooth-auto-recovery==1.4.5",
"bluetooth-data-tools==1.27.0",
"dbus-fast==2.43.0",
"habluetooth==3.38.1"
"habluetooth==3.39.0"
]
}
@@ -41,6 +41,7 @@ async def async_setup_entry(
DemoTVShowPlayer(),
DemoBrowsePlayer("Browse"),
DemoGroupPlayer("Group"),
DemoSearchPlayer("Search"),
]
)

@@ -95,6 +96,8 @@ NETFLIX_PLAYER_SUPPORT = (

BROWSE_PLAYER_SUPPORT = MediaPlayerEntityFeature.BROWSE_MEDIA

SEARCH_PLAYER_SUPPORT = MediaPlayerEntityFeature.SEARCH_MEDIA


class AbstractDemoPlayer(MediaPlayerEntity):
"""A demo media players."""
@@ -398,3 +401,9 @@ class DemoGroupPlayer(AbstractDemoPlayer):
| MediaPlayerEntityFeature.GROUPING
| MediaPlayerEntityFeature.TURN_OFF
)


class DemoSearchPlayer(AbstractDemoPlayer):
"""A Demo media player that supports searching."""

_attr_supported_features = SEARCH_PLAYER_SUPPORT
@@ -88,6 +88,8 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
):
"""Representation of a devolo device tracker."""

_attr_translation_key = "device_tracker"

def __init__(
self,
coordinator: DevoloDataUpdateCoordinator[list[ConnectedStationInfo]],
@@ -123,13 +125,6 @@ class DevoloScannerEntity( # pylint: disable=hass-enforce-class-module
)
return attrs

@property
def icon(self) -> str:
"""Return device icon."""
if self.is_connected:
return "mdi:lan-connect"
return "mdi:lan-disconnect"

@property
def is_connected(self) -> bool:
"""Return true if the device is connected to the network."""
@@ -13,6 +13,14 @@
"default": "mdi:wifi-plus"
}
},
"device_tracker": {
"device_tracker": {
"default": "mdi:lan-disconnect",
"state": {
"home": "mdi:lan-connect"
}
}
},
"sensor": {
"connected_plc_devices": {
"default": "mdi:lan"
@@ -114,9 +114,14 @@ class DevoloSwitchEntity[_DataT: _DataType](
translation_key="password_protected",
translation_placeholders={"title": self.entry.title},
) from ex
except DeviceUnavailable:
pass # The coordinator will handle this
await self.coordinator.async_request_refresh()
except DeviceUnavailable as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_response",
translation_placeholders={"title": self.entry.title},
) from ex
finally:
await self.coordinator.async_request_refresh()

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
@@ -129,6 +134,11 @@ class DevoloSwitchEntity[_DataT: _DataType](
translation_key="password_protected",
translation_placeholders={"title": self.entry.title},
) from ex
except DeviceUnavailable:
pass # The coordinator will handle this
await self.coordinator.async_request_refresh()
except DeviceUnavailable as ex:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_response",
translation_placeholders={"title": self.entry.title},
) from ex
finally:
await self.coordinator.async_request_refresh()
@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["eheimdigital"],
"quality_scale": "bronze",
"requirements": ["eheimdigital==1.0.6"],
"requirements": ["eheimdigital==1.1.0"],
"zeroconf": [
{ "type": "_http._tcp.local.", "name": "eheimdigital._http._tcp.local." }
]
@@ -6,13 +6,13 @@ from datetime import timedelta
import logging
from typing import Any

from pyephember.pyephember import (
from pyephember2.pyephember2 import (
EphEmber,
ZoneMode,
zone_current_temperature,
zone_is_active,
zone_is_boost_active,
zone_is_hot_water,
zone_is_hotwater,
zone_mode,
zone_name,
zone_target_temperature,
@@ -69,14 +69,18 @@ def setup_platform(

try:
ember = EphEmber(username, password)
zones = ember.get_zones()
for zone in zones:
add_entities([EphEmberThermostat(ember, zone)])
except RuntimeError:
_LOGGER.error("Cannot connect to EphEmber")
_LOGGER.error("Cannot login to EphEmber")

try:
homes = ember.get_zones()
except RuntimeError:
_LOGGER.error("Fail to get zones")
return

return
add_entities(
EphEmberThermostat(ember, zone) for home in homes for zone in home["zones"]
)


class EphEmberThermostat(ClimateEntity):
@@ -85,33 +89,35 @@ class EphEmberThermostat(ClimateEntity):
_attr_hvac_modes = OPERATION_LIST
_attr_temperature_unit = UnitOfTemperature.CELSIUS

def __init__(self, ember, zone):
def __init__(self, ember, zone) -> None:
"""Initialize the thermostat."""
self._ember = ember
self._zone_name = zone_name(zone)
self._zone = zone
self._hot_water = zone_is_hot_water(zone)

# hot water = true, is immersive device without target temperature control.
self._hot_water = zone_is_hotwater(zone)

self._attr_name = self._zone_name

self._attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.AUX_HEAT
)
self._attr_target_temperature_step = 0.5
if self._hot_water:
self._attr_supported_features = ClimateEntityFeature.AUX_HEAT
self._attr_target_temperature_step = None
self._attr_supported_features |= (
ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON
)
else:
self._attr_target_temperature_step = 0.5
self._attr_supported_features = (
ClimateEntityFeature.TURN_OFF
| ClimateEntityFeature.TURN_ON
| ClimateEntityFeature.TARGET_TEMPERATURE
)

@property
def current_temperature(self):
def current_temperature(self) -> float | None:
"""Return the current temperature."""
return zone_current_temperature(self._zone)

@property
def target_temperature(self):
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
return zone_target_temperature(self._zone)

@@ -133,12 +139,12 @@ class EphEmberThermostat(ClimateEntity):
"""Set the operation mode."""
mode = self.map_mode_hass_eph(hvac_mode)
if mode is not None:
self._ember.set_mode_by_name(self._zone_name, mode)
self._ember.set_zone_mode(self._zone["zoneid"], mode)
else:
_LOGGER.error("Invalid operation mode provided %s", hvac_mode)

@property
def is_aux_heat(self):
def is_aux_heat(self) -> bool:
"""Return true if aux heater."""

return zone_is_boost_active(self._zone)
@@ -167,7 +173,7 @@ class EphEmberThermostat(ClimateEntity):
if temperature > self.max_temp or temperature < self.min_temp:
return

self._ember.set_target_temperture_by_name(self._zone_name, temperature)
self._ember.set_zone_target_temperature(self._zone["zoneid"], temperature)

@property
def min_temp(self):
@@ -188,7 +194,8 @@ class EphEmberThermostat(ClimateEntity):

def update(self) -> None:
"""Get the latest data."""
self._zone = self._ember.get_zone(self._zone_name)
self._ember.get_zones()
self._zone = self._ember.get_zone(self._zone["zoneid"])

@staticmethod
def map_mode_hass_eph(operation_mode):
@@ -1,10 +1,10 @@
{
"domain": "ephember",
"name": "EPH Controls",
"codeowners": ["@ttroy50"],
"codeowners": ["@ttroy50", "@roberty99"],
"documentation": "https://www.home-assistant.io/integrations/ephember",
"iot_class": "local_polling",
"loggers": ["pyephember"],
"loggers": ["pyephember2"],
"quality_scale": "legacy",
"requirements": ["pyephember==0.3.1"]
"requirements": ["pyephember2==0.4.12"]
}
@@ -4,7 +4,7 @@ from __future__ import annotations

from aioesphomeapi import APIClient

from homeassistant.components import ffmpeg, zeroconf
from homeassistant.components import zeroconf
from homeassistant.components.bluetooth import async_remove_scanner
from homeassistant.const import (
CONF_HOST,
@@ -17,13 +17,10 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.issue_registry import async_delete_issue
from homeassistant.helpers.typing import ConfigType

from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
from .dashboard import async_setup as async_setup_dashboard
from . import dashboard, ffmpeg_proxy
from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DOMAIN
from .domain_data import DomainData

# Import config flow so that it's added to the registry
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .ffmpeg_proxy import FFmpegProxyData, FFmpegProxyView
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -33,12 +30,8 @@ CLIENT_INFO = f"Home Assistant {ha_version}"

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the esphome component."""
proxy_data = hass.data[DATA_FFMPEG_PROXY] = FFmpegProxyData()

await async_setup_dashboard(hass)
hass.http.register_view(
FFmpegProxyView(ffmpeg.get_ffmpeg_manager(hass), proxy_data)
)
ffmpeg_proxy.async_setup(hass)
await dashboard.async_setup(hass)
return True

@@ -47,6 +47,7 @@ from .const import (
DOMAIN,
)
from .dashboard import async_get_or_create_dashboard_manager, async_set_dashboard_info
from .entry_data import ESPHomeConfigEntry
from .manager import async_replace_device

ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
@@ -608,7 +609,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
@staticmethod
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
config_entry: ESPHomeConfigEntry,
) -> OptionsFlowHandler:
"""Get the options flow for this handler."""
return OptionsFlowHandler()
@@ -22,5 +22,3 @@ PROJECT_URLS = {
# ESPHome always uses .0 for the changelog URL
STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"

DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy"
@@ -28,6 +28,8 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN

# Import config flow so that it's added to the registry
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .enum_mapper import EsphomeEnumMapper
@@ -167,7 +169,12 @@ def convert_api_error_ha_error[**_P, _R, _EntityT: EsphomeEntity[Any, Any]](
return await func(self, *args, **kwargs)
except APIConnectionError as error:
raise HomeAssistantError(
f"Error communicating with device: {error}"
translation_domain=DOMAIN,
translation_key="error_communicating_with_device",
translation_placeholders={
"device_name": self._device_info.name,
"error": str(error),
},
) from error

return handler
@@ -194,6 +201,7 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]):
_static_info: _InfoT
_state: _StateT
_has_state: bool
device_entry: dr.DeviceEntry

def __init__(
self,
@@ -11,17 +11,20 @@ from typing import Final
from aiohttp import web
from aiohttp.abc import AbstractStreamWriter, BaseRequest

from homeassistant.components import ffmpeg
from homeassistant.components.ffmpeg import FFmpegManager
from homeassistant.components.http import HomeAssistantView
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.hass_dict import HassKey

from .const import DATA_FFMPEG_PROXY
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

_MAX_CONVERSIONS_PER_DEVICE: Final[int] = 2


@callback
def async_create_proxy_url(
hass: HomeAssistant,
device_id: str,
@@ -32,7 +35,7 @@ def async_create_proxy_url(
width: int | None = None,
) -> str:
"""Create a use proxy URL that automatically converts the media."""
data: FFmpegProxyData = hass.data[DATA_FFMPEG_PROXY]
data = hass.data[DATA_FFMPEG_PROXY]
return data.async_create_proxy_url(
device_id, media_url, media_format, rate, channels, width
)
@@ -313,3 +316,16 @@ class FFmpegProxyView(HomeAssistantView):
assert writer is not None
await resp.transcode(request, writer)
return resp


DATA_FFMPEG_PROXY: HassKey[FFmpegProxyData] = HassKey(f"{DOMAIN}.ffmpeg_proxy")


@callback
def async_setup(hass: HomeAssistant) -> None:
"""Set up the ffmpeg proxy."""
proxy_data = FFmpegProxyData()
hass.data[DATA_FFMPEG_PROXY] = proxy_data
hass.http.register_view(
FFmpegProxyView(ffmpeg.get_ffmpeg_manager(hass), proxy_data)
)
homeassistant/components/esphome/icons.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
"entity": {
"binary_sensor": {
"assist_in_progress": {
"default": "mdi:timer-sand"
}
},
"select": {
"pipeline": {
"default": "mdi:filter-outline"
},
"vad_sensitivity": {
"default": "mdi:volume-high"
},
"wake_word": {
"default": "mdi:microphone"
}
}
}
}
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"requirements": [
"aioesphomeapi==30.0.1",
"esphome-dashboard-api==1.2.3",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==2.13.1"
],
"zeroconf": ["_esphomelib._tcp.local."]
@@ -148,10 +148,6 @@ class EsphomeMediaPlayer(
announcement: bool,
) -> str | None:
"""Get URL for ffmpeg proxy."""
if self.device_entry is None:
# Device id is required
return None

# Choose the first default or announcement supported format
format_to_use: MediaPlayerSupportedFormat | None = None
for supported_format in supported_formats:
@@ -20,13 +20,13 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from homeassistant.util.enum import try_parse_enum

from .entity import EsphomeEntity, platform_async_setup_entry
from .entry_data import ESPHomeConfigEntry
from .enum_mapper import EsphomeEnumMapper

PARALLEL_UPDATES = 0
@@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0

async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: ESPHomeConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up esphome sensors based on a config entry."""
@@ -184,6 +184,15 @@
"exceptions": {
"action_call_failed": {
"message": "Failed to execute the action call {call_name} on {device_name}: {error}"
},
"error_communicating_with_device": {
"message": "Error communicating with the device {device_name}: {error}"
},
"error_compiling": {
"message": "Error compiling {configuration}; Try again in ESPHome dashboard for more information."
},
"error_uploading": {
"message": "Error during OTA of {configuration}; Try again in ESPHome dashboard for more information."
}
}
}
@@ -18,7 +18,6 @@ from homeassistant.components.update import (
UpdateEntity,
UpdateEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
@@ -27,6 +26,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.enum import try_parse_enum

from .const import DOMAIN
from .coordinator import ESPHomeDashboardCoordinator
from .dashboard import async_get_dashboard
from .domain_data import DomainData
@@ -36,7 +36,7 @@ from .entity import (
esphome_state_property,
platform_async_setup_entry,
)
from .entry_data import RuntimeEntryData
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData

PARALLEL_UPDATES = 0

@@ -47,7 +47,7 @@ NO_FEATURES = UpdateEntityFeature(0)

async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: ESPHomeConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up ESPHome update based on a config entry."""
@@ -202,16 +202,23 @@ class ESPHomeDashboardUpdateEntity(
api = coordinator.api
device = coordinator.data.get(self._device_info.name)
assert device is not None
configuration = device["configuration"]
try:
if not await api.compile(device["configuration"]):
if not await api.compile(configuration):
raise HomeAssistantError(
f"Error compiling {device['configuration']}; "
"Try again in ESPHome dashboard for more information."
translation_domain=DOMAIN,
translation_key="error_compiling",
translation_placeholders={
"configuration": configuration,
},
)
if not await api.upload(device["configuration"], "OTA"):
if not await api.upload(configuration, "OTA"):
raise HomeAssistantError(
f"Error updating {device['configuration']} via OTA; "
"Try again in ESPHome dashboard for more information."
translation_domain=DOMAIN,
translation_key="error_uploading",
translation_placeholders={
"configuration": configuration,
},
)
finally:
await self.coordinator.async_request_refresh()
@@ -211,6 +211,10 @@ class FibaroController:
"""Return list of scenes."""
return self._scenes

def get_all_devices(self) -> list[DeviceModel]:
"""Return list of all fibaro devices."""
return self._fibaro_device_manager.get_devices()

def read_fibaro_info(self) -> InfoModel:
"""Return the general info about the hub."""
return self._fibaro_info
homeassistant/components/fibaro/diagnostics.py (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
"""Diagnostics support for fibaro integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import Any
|
||||
|
||||
from pyfibaro.fibaro_device import DeviceModel
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from . import CONF_IMPORT_PLUGINS, FibaroConfigEntry
|
||||
|
||||
TO_REDACT = {"password"}
|
||||
|
||||
|
||||
def _create_diagnostics_data(
|
||||
config_entry: FibaroConfigEntry, devices: list[DeviceModel]
|
||||
) -> dict[str, Any]:
|
||||
"""Combine diagnostics information and redact sensitive information."""
|
||||
return {
|
||||
"config": {CONF_IMPORT_PLUGINS: config_entry.data.get(CONF_IMPORT_PLUGINS)},
|
||||
"fibaro_devices": async_redact_data([d.raw_data for d in devices], TO_REDACT),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: FibaroConfigEntry
|
||||
) -> Mapping[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
controller = config_entry.runtime_data
|
||||
devices = controller.get_all_devices()
|
||||
return _create_diagnostics_data(config_entry, devices)
|
||||
|
||||
|
||||
async def async_get_device_diagnostics(
|
||||
hass: HomeAssistant, config_entry: FibaroConfigEntry, device: DeviceEntry
|
||||
) -> Mapping[str, Any]:
|
||||
"""Return diagnostics for a device."""
|
||||
controller = config_entry.runtime_data
|
||||
devices = controller.get_all_devices()
|
||||
|
||||
ha_device_id = next(iter(device.identifiers))[1]
|
||||
if ha_device_id == controller.hub_serial:
|
||||
# special case where the device is representing the fibaro hub
|
||||
return _create_diagnostics_data(config_entry, devices)
|
||||
|
||||
# normal devices are represented by a parent / child structure
|
||||
filtered_devices = [
|
||||
device
|
||||
for device in devices
|
||||
if ha_device_id in (device.fibaro_id, device.parent_fibaro_id)
|
||||
]
|
||||
return _create_diagnostics_data(config_entry, filtered_devices)
|
@ -252,9 +252,7 @@ class HomeConnectCoordinator(
|
||||
appliance_data = await self._get_appliance_data(
|
||||
appliance_info, self.data.get(appliance_info.ha_id)
|
||||
)
|
||||
if event_message_ha_id in self.data:
|
||||
self.data[event_message_ha_id].update(appliance_data)
|
||||
else:
|
||||
if event_message_ha_id not in self.data:
|
||||
self.data[event_message_ha_id] = appliance_data
|
||||
for listener, context in self._special_listeners.values():
|
||||
if (
|
||||
|
@ -17,7 +17,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .common import setup_home_connect_entry
|
||||
from .const import (
|
||||
APPLIANCES_WITH_PROGRAMS,
|
||||
AVAILABLE_MAPS_ENUM,
|
||||
BEAN_AMOUNT_OPTIONS,
|
||||
BEAN_CONTAINER_OPTIONS,
|
||||
@ -313,7 +312,7 @@ def _get_entities_for_appliance(
|
||||
HomeConnectProgramSelectEntity(entry.runtime_data, appliance, desc)
|
||||
for desc in PROGRAM_SELECT_ENTITY_DESCRIPTIONS
|
||||
]
|
||||
if appliance.info.type in APPLIANCES_WITH_PROGRAMS
|
||||
if appliance.programs
|
||||
else []
|
||||
),
|
||||
*[
|
||||
|
@ -136,7 +136,7 @@
|
||||
"state_attributes": {
|
||||
"preset_mode": {
|
||||
"state": {
|
||||
"manual": "Manual"
|
||||
"manual": "[%key:common::state::manual%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -14,7 +14,7 @@
|
||||
"title": "Pair with a device via HomeKit Accessory Protocol",
|
||||
"description": "HomeKit Device communicates with {name} ({category}) over the local area network using a secure encrypted connection without a separate HomeKit Controller or iCloud. Enter your eight digit HomeKit pairing code (in the format XXX-XX-XXX) to use this accessory. This code is usually found on the device itself or in the packaging, often close to a HomeKit bar code, next to the image of a small house.",
|
||||
"data": {
|
||||
"pairing_code": "Pairing Code",
|
||||
"pairing_code": "Pairing code",
|
||||
"allow_insecure_setup_codes": "Allow pairing with insecure setup codes."
|
||||
}
|
||||
},
|
||||
@ -112,7 +112,7 @@
|
||||
"air_purifier_state_target": {
|
||||
"state": {
|
||||
"automatic": "Automatic",
|
||||
"manual": "Manual"
|
||||
"manual": "[%key:common::state::manual%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -1,27 +1,27 @@
|
||||
"""The La Marzocco integration."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from packaging import version
|
||||
from pylamarzocco.clients.bluetooth import LaMarzoccoBluetoothClient
|
||||
from pylamarzocco.clients.cloud import LaMarzoccoCloudClient
|
||||
from pylamarzocco.clients.local import LaMarzoccoLocalClient
|
||||
from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
from pylamarzocco import (
|
||||
LaMarzoccoBluetoothClient,
|
||||
LaMarzoccoCloudClient,
|
||||
LaMarzoccoMachine,
|
||||
)
|
||||
from pylamarzocco.const import FirmwareType
|
||||
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
|
||||
|
||||
from homeassistant.components.bluetooth import async_discovered_service_info
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_MAC,
|
||||
CONF_MODEL,
|
||||
CONF_NAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_TOKEN,
|
||||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
@ -29,9 +29,9 @@ from .const import CONF_USE_BLUETOOTH, DOMAIN
|
||||
from .coordinator import (
|
||||
LaMarzoccoConfigEntry,
|
||||
LaMarzoccoConfigUpdateCoordinator,
|
||||
LaMarzoccoFirmwareUpdateCoordinator,
|
||||
LaMarzoccoRuntimeData,
|
||||
LaMarzoccoStatisticsUpdateCoordinator,
|
||||
LaMarzoccoScheduleUpdateCoordinator,
|
||||
LaMarzoccoSettingsUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS = [
|
||||
@ -40,11 +40,12 @@ PLATFORMS = [
|
||||
Platform.CALENDAR,
|
||||
Platform.NUMBER,
|
||||
Platform.SELECT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.UPDATE,
|
||||
]
|
||||
|
||||
BT_MODEL_PREFIXES = ("MICRA", "MINI", "GS3")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -61,31 +62,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
client=client,
|
||||
)
|
||||
|
||||
# initialize the firmware update coordinator early to check the firmware version
|
||||
firmware_device = LaMarzoccoMachine(
|
||||
model=entry.data[CONF_MODEL],
|
||||
serial_number=entry.unique_id,
|
||||
name=entry.data[CONF_NAME],
|
||||
cloud_client=cloud_client,
|
||||
)
|
||||
try:
|
||||
settings = await cloud_client.get_thing_settings(serial)
|
||||
except AuthFail as ex:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="authentication_failed"
|
||||
) from ex
|
||||
except RequestNotSuccessful as ex:
|
||||
_LOGGER.debug(ex, exc_info=True)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN, translation_key="api_error"
|
||||
) from ex
|
||||
|
||||
firmware_coordinator = LaMarzoccoFirmwareUpdateCoordinator(
|
||||
hass, entry, firmware_device
|
||||
)
|
||||
await firmware_coordinator.async_config_entry_first_refresh()
|
||||
gateway_version = version.parse(
|
||||
firmware_device.firmware[FirmwareType.GATEWAY].current_version
|
||||
settings.firmwares[FirmwareType.GATEWAY].build_version
|
||||
)
|
||||
|
||||
if gateway_version >= version.parse("v5.0.9"):
|
||||
# remove host from config entry, it is not supported anymore
|
||||
data = {k: v for k, v in entry.data.items() if k != CONF_HOST}
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data=data,
|
||||
)
|
||||
|
||||
elif gateway_version < version.parse("v3.4-rc5"):
|
||||
if gateway_version < version.parse("v5.0.9"):
|
||||
# incompatible gateway firmware, create an issue
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
@ -97,24 +90,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
translation_placeholders={"gateway_version": str(gateway_version)},
|
||||
)
|
||||
|
||||
# initialize local API
|
||||
local_client: LaMarzoccoLocalClient | None = None
|
||||
if (host := entry.data.get(CONF_HOST)) is not None:
|
||||
_LOGGER.debug("Initializing local API")
|
||||
local_client = LaMarzoccoLocalClient(
|
||||
host=host,
|
||||
local_bearer=entry.data[CONF_TOKEN],
|
||||
client=client,
|
||||
)
|
||||
|
||||
# initialize Bluetooth
|
||||
bluetooth_client: LaMarzoccoBluetoothClient | None = None
|
||||
if entry.options.get(CONF_USE_BLUETOOTH, True):
|
||||
|
||||
def bluetooth_configured() -> bool:
|
||||
return entry.data.get(CONF_MAC, "") and entry.data.get(CONF_NAME, "")
|
||||
|
||||
if not bluetooth_configured():
|
||||
if entry.options.get(CONF_USE_BLUETOOTH, True) and (
|
||||
token := settings.ble_auth_token
|
||||
):
|
||||
if CONF_MAC not in entry.data:
|
||||
for discovery_info in async_discovered_service_info(hass):
|
||||
if (
|
||||
(name := discovery_info.name)
|
||||
@ -128,38 +109,43 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_MAC: discovery_info.address,
|
||||
CONF_NAME: discovery_info.name,
|
||||
},
|
||||
)
|
||||
break
|
||||
|
||||
if bluetooth_configured():
|
||||
if not entry.data[CONF_TOKEN]:
|
||||
# update the token in the config entry
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_TOKEN: token,
|
||||
},
|
||||
)
|
||||
|
||||
if CONF_MAC in entry.data:
|
||||
_LOGGER.debug("Initializing Bluetooth device")
|
||||
bluetooth_client = LaMarzoccoBluetoothClient(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
serial_number=serial,
|
||||
token=entry.data[CONF_TOKEN],
|
||||
address_or_ble_device=entry.data[CONF_MAC],
|
||||
ble_token=token,
|
||||
)
|
||||
|
||||
device = LaMarzoccoMachine(
|
||||
model=entry.data[CONF_MODEL],
|
||||
serial_number=entry.unique_id,
|
||||
name=entry.data[CONF_NAME],
|
||||
cloud_client=cloud_client,
|
||||
local_client=local_client,
|
||||
bluetooth_client=bluetooth_client,
|
||||
)
|
||||
|
||||
coordinators = LaMarzoccoRuntimeData(
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client),
|
||||
firmware_coordinator,
|
||||
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
|
||||
)
|
||||
|
||||
# API does not like concurrent requests, so no asyncio.gather here
|
||||
await coordinators.config_coordinator.async_config_entry_first_refresh()
|
||||
await coordinators.statistics_coordinator.async_config_entry_first_refresh()
|
||||
await asyncio.gather(
|
||||
coordinators.config_coordinator.async_config_entry_first_refresh(),
|
||||
coordinators.settings_coordinator.async_config_entry_first_refresh(),
|
||||
coordinators.schedule_coordinator.async_config_entry_first_refresh(),
|
||||
)
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
@ -184,41 +170,45 @@ async def async_migrate_entry(
|
||||
hass: HomeAssistant, entry: LaMarzoccoConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate config entry."""
|
||||
if entry.version > 2:
|
||||
if entry.version > 3:
|
||||
# guard against downgrade from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 1:
|
||||
_LOGGER.error(
|
||||
"Migration from version 1 is no longer supported, please remove and re-add the integration"
|
||||
)
|
||||
return False
|
||||
|
||||
if entry.version == 2:
|
||||
cloud_client = LaMarzoccoCloudClient(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
)
|
||||
try:
|
||||
fleet = await cloud_client.get_customer_fleet()
|
||||
things = await cloud_client.list_things()
|
||||
except (AuthFail, RequestNotSuccessful) as exc:
|
||||
_LOGGER.error("Migration failed with error %s", exc)
|
||||
return False
|
||||
|
||||
assert entry.unique_id is not None
|
||||
device = fleet[entry.unique_id]
|
||||
v2_data = {
|
||||
v3_data = {
|
||||
CONF_USERNAME: entry.data[CONF_USERNAME],
|
||||
CONF_PASSWORD: entry.data[CONF_PASSWORD],
|
||||
CONF_MODEL: device.model,
|
||||
CONF_NAME: device.name,
|
||||
CONF_TOKEN: device.communication_key,
|
||||
CONF_TOKEN: next(
|
||||
(
|
||||
thing.ble_auth_token
|
||||
for thing in things
|
||||
if thing.serial_number == entry.unique_id
|
||||
),
|
||||
None,
|
||||
),
|
||||
}
|
||||
|
||||
if CONF_HOST in entry.data:
|
||||
v2_data[CONF_HOST] = entry.data[CONF_HOST]
|
||||
|
||||
if CONF_MAC in entry.data:
|
||||
v2_data[CONF_MAC] = entry.data[CONF_MAC]
|
||||
|
||||
v3_data[CONF_MAC] = entry.data[CONF_MAC]
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data=v2_data,
|
||||
version=2,
|
||||
data=v3_data,
|
||||
version=3,
|
||||
)
|
||||
_LOGGER.debug("Migrated La Marzocco config entry to version 2")
|
||||
|
||||
return True
|
||||
|
@ -2,9 +2,10 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import cast
|
||||
|
||||
from pylamarzocco.const import MachineModel
|
||||
from pylamarzocco.models import LaMarzoccoMachineConfig
|
||||
from pylamarzocco.const import BackFlushStatus, MachineState, WidgetType
|
||||
from pylamarzocco.models import BackFlush, BaseWidgetOutput, MachineStatus
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@ -16,7 +17,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import LaMarzoccoConfigEntry
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@ -29,7 +30,7 @@ class LaMarzoccoBinarySensorEntityDescription(
|
||||
):
|
||||
"""Description of a La Marzocco binary sensor."""
|
||||
|
||||
is_on_fn: Callable[[LaMarzoccoMachineConfig], bool | None]
|
||||
is_on_fn: Callable[[dict[WidgetType, BaseWidgetOutput]], bool | None]
|
||||
|
||||
|
||||
ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
|
||||
@ -37,32 +38,30 @@ ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
|
||||
key="water_tank",
|
||||
translation_key="water_tank",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
is_on_fn=lambda config: not config.water_contact,
|
||||
is_on_fn=lambda config: WidgetType.CM_NO_WATER in config,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
supported_fn=lambda coordinator: coordinator.local_connection_configured,
|
||||
),
|
||||
LaMarzoccoBinarySensorEntityDescription(
|
||||
key="brew_active",
|
||||
translation_key="brew_active",
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
is_on_fn=lambda config: config.brew_active,
|
||||
available_fn=lambda device: device.websocket_connected,
|
||||
is_on_fn=(
|
||||
lambda config: cast(
|
||||
MachineStatus, config[WidgetType.CM_MACHINE_STATUS]
|
||||
).status
|
||||
is MachineState.BREWING
|
||||
),
|
||||
available_fn=lambda device: device.websocket.connected,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
LaMarzoccoBinarySensorEntityDescription(
|
||||
key="backflush_enabled",
|
||||
translation_key="backflush_enabled",
|
||||
device_class=BinarySensorDeviceClass.RUNNING,
|
||||
is_on_fn=lambda config: config.backflush_enabled,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
SCALE_ENTITIES: tuple[LaMarzoccoBinarySensorEntityDescription, ...] = (
|
||||
LaMarzoccoBinarySensorEntityDescription(
|
||||
key="connected",
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
is_on_fn=lambda config: config.scale.connected if config.scale else None,
|
||||
is_on_fn=(
|
||||
lambda config: cast(BackFlush, config[WidgetType.CM_BACK_FLUSH]).status
|
||||
is BackFlushStatus.REQUESTED
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
@ -76,30 +75,11 @@ async def async_setup_entry(
|
||||
"""Set up binary sensor entities."""
|
||||
coordinator = entry.runtime_data.config_coordinator
|
||||
|
||||
entities = [
|
||||
async_add_entities(
|
||||
LaMarzoccoBinarySensorEntity(coordinator, description)
|
||||
for description in ENTITIES
|
||||
if description.supported_fn(coordinator)
|
||||
]
|
||||
|
||||
if (
|
||||
coordinator.device.model in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R)
|
||||
and coordinator.device.config.scale
|
||||
):
|
||||
entities.extend(
|
||||
LaMarzoccoScaleBinarySensorEntity(coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
def _async_add_new_scale() -> None:
|
||||
async_add_entities(
|
||||
LaMarzoccoScaleBinarySensorEntity(coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
coordinator.new_device_callback.append(_async_add_new_scale)
|
||||
|
||||
async_add_entities(entities)
|
||||
)
|
||||
|
||||
|
||||
class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity):
|
||||
@ -110,12 +90,6 @@ class LaMarzoccoBinarySensorEntity(LaMarzoccoEntity, BinarySensorEntity):
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.entity_description.is_on_fn(self.coordinator.device.config)
|
||||
|
||||
|
||||
class LaMarzoccoScaleBinarySensorEntity(
|
||||
LaMarzoccoBinarySensorEntity, LaMarzoccScaleEntity
|
||||
):
|
||||
"""Binary sensor for La Marzocco scales."""
|
||||
|
||||
entity_description: LaMarzoccoBinarySensorEntityDescription
|
||||
return self.entity_description.is_on_fn(
|
||||
self.coordinator.device.dashboard.config
|
||||
)
|
||||
|
@ -3,7 +3,7 @@
|
||||
from collections.abc import Iterator
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from pylamarzocco.models import LaMarzoccoWakeUpSleepEntry
|
||||
from pylamarzocco.const import WeekDay
|
||||
|
||||
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -18,15 +18,15 @@ PARALLEL_UPDATES = 0
|
||||
|
||||
CALENDAR_KEY = "auto_on_off_schedule"
|
||||
|
||||
DAY_OF_WEEK = [
|
||||
"monday",
|
||||
"tuesday",
|
||||
"wednesday",
|
||||
"thursday",
|
||||
"friday",
|
||||
"saturday",
|
||||
"sunday",
|
||||
]
|
||||
WEEKDAY_TO_ENUM = {
|
||||
0: WeekDay.MONDAY,
|
||||
1: WeekDay.TUESDAY,
|
||||
2: WeekDay.WEDNESDAY,
|
||||
3: WeekDay.THURSDAY,
|
||||
4: WeekDay.FRIDAY,
|
||||
5: WeekDay.SATURDAY,
|
||||
6: WeekDay.SUNDAY,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@ -36,10 +36,12 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up switch entities and services."""
|
||||
|
||||
coordinator = entry.runtime_data.config_coordinator
|
||||
coordinator = entry.runtime_data.schedule_coordinator
|
||||
|
||||
async_add_entities(
|
||||
LaMarzoccoCalendarEntity(coordinator, CALENDAR_KEY, wake_up_sleep_entry)
|
||||
for wake_up_sleep_entry in coordinator.device.config.wake_up_sleep_entries.values()
|
||||
LaMarzoccoCalendarEntity(coordinator, CALENDAR_KEY, schedule.identifier)
|
||||
for schedule in coordinator.device.schedule.smart_wake_up_sleep.schedules
|
||||
if schedule.identifier
|
||||
)
|
||||
|
||||
|
||||
@ -52,12 +54,12 @@ class LaMarzoccoCalendarEntity(LaMarzoccoBaseEntity, CalendarEntity):
|
||||
self,
|
||||
coordinator: LaMarzoccoUpdateCoordinator,
|
||||
key: str,
|
||||
wake_up_sleep_entry: LaMarzoccoWakeUpSleepEntry,
|
||||
identifier: str,
|
||||
) -> None:
|
||||
"""Set up calendar."""
|
||||
super().__init__(coordinator, f"{key}_{wake_up_sleep_entry.entry_id}")
|
||||
self.wake_up_sleep_entry = wake_up_sleep_entry
|
||||
self._attr_translation_placeholders = {"id": wake_up_sleep_entry.entry_id}
|
||||
super().__init__(coordinator, f"{key}_{identifier}")
|
||||
self._identifier = identifier
|
||||
self._attr_translation_placeholders = {"id": identifier}
|
||||
|
||||
@property
|
||||
def event(self) -> CalendarEvent | None:
|
||||
@ -112,24 +114,31 @@ class LaMarzoccoCalendarEntity(LaMarzoccoBaseEntity, CalendarEntity):
|
||||
def _async_get_calendar_event(self, date: datetime) -> CalendarEvent | None:
|
||||
"""Return calendar event for a given weekday."""
|
||||
|
||||
schedule_entry = (
|
||||
self.coordinator.device.schedule.smart_wake_up_sleep.schedules_dict[
|
||||
self._identifier
|
||||
]
|
||||
)
|
||||
# check first if auto/on off is turned on in general
|
||||
if not self.wake_up_sleep_entry.enabled:
|
||||
if not schedule_entry.enabled:
|
||||
return None
|
||||
|
||||
# parse the schedule for the day
|
||||
|
||||
if DAY_OF_WEEK[date.weekday()] not in self.wake_up_sleep_entry.days:
|
||||
if WEEKDAY_TO_ENUM[date.weekday()] not in schedule_entry.days:
|
||||
return None
|
||||
|
||||
hour_on, minute_on = self.wake_up_sleep_entry.time_on.split(":")
|
||||
hour_off, minute_off = self.wake_up_sleep_entry.time_off.split(":")
|
||||
hour_on = schedule_entry.on_time_minutes // 60
|
||||
minute_on = schedule_entry.on_time_minutes % 60
|
||||
hour_off = schedule_entry.off_time_minutes // 60
|
||||
minute_off = schedule_entry.off_time_minutes % 60
|
||||
|
||||
# if off time is 24:00, then it means the off time is the next day
|
||||
# only for legacy schedules
|
||||
day_offset = 0
|
||||
if hour_off == "24":
|
||||
if hour_off == 24:
|
||||
# if the machine is scheduled to turn off at midnight, we need to
|
||||
# set the end date to the next day
|
||||
day_offset = 1
|
||||
hour_off = "0"
|
||||
hour_off = 0
|
||||
|
||||
end_date = date.replace(
|
||||
hour=int(hour_off),
|
||||
|
@ -7,10 +7,9 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientSession
|
||||
from pylamarzocco.clients.cloud import LaMarzoccoCloudClient
|
||||
from pylamarzocco.clients.local import LaMarzoccoLocalClient
|
||||
from pylamarzocco import LaMarzoccoCloudClient
|
||||
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
|
||||
from pylamarzocco.models import LaMarzoccoDeviceInfo
|
||||
from pylamarzocco.models import Thing
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
@ -26,9 +25,7 @@ from homeassistant.config_entries import (
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_ADDRESS,
|
||||
CONF_HOST,
|
||||
CONF_MAC,
|
||||
CONF_MODEL,
|
||||
CONF_NAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_TOKEN,
|
||||
@ -59,14 +56,14 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for La Marzocco."""
|
||||
|
||||
VERSION = 2
|
||||
VERSION = 3
|
||||
|
||||
_client: ClientSession
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._config: dict[str, Any] = {}
|
||||
self._fleet: dict[str, LaMarzoccoDeviceInfo] = {}
|
||||
self._things: dict[str, Thing] = {}
|
||||
self._discovered: dict[str, str] = {}
|
||||
|
||||
async def async_step_user(
|
||||
@ -83,7 +80,6 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data = {
|
||||
**data,
|
||||
**user_input,
|
||||
**self._discovered,
|
||||
}
|
||||
|
||||
self._client = async_create_clientsession(self.hass)
|
||||
@ -93,7 +89,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
client=self._client,
|
||||
)
|
||||
try:
|
||||
self._fleet = await cloud_client.get_customer_fleet()
|
||||
things = await cloud_client.list_things()
|
||||
except AuthFail:
|
||||
_LOGGER.debug("Server rejected login credentials")
|
||||
errors["base"] = "invalid_auth"
|
||||
@ -101,37 +97,30 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
_LOGGER.error("Error connecting to server: %s", exc)
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
if not self._fleet:
|
||||
self._things = {thing.serial_number: thing for thing in things}
|
||||
if not self._things:
|
||||
errors["base"] = "no_machines"
|
||||
|
||||
if not errors:
|
||||
self._config = data
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), data=data
|
||||
)
|
||||
if self._discovered:
|
||||
if self._discovered[CONF_MACHINE] not in self._fleet:
|
||||
if self._discovered[CONF_MACHINE] not in self._things:
|
||||
errors["base"] = "machine_not_found"
|
||||
else:
|
||||
self._config = data
|
||||
# if DHCP discovery was used, auto fill machine selection
|
||||
if CONF_HOST in self._discovered:
|
||||
return await self.async_step_machine_selection(
|
||||
user_input={
|
||||
CONF_HOST: self._discovered[CONF_HOST],
|
||||
CONF_MACHINE: self._discovered[CONF_MACHINE],
|
||||
}
|
||||
)
|
||||
# if Bluetooth discovery was used, only select host
|
||||
return self.async_show_form(
|
||||
step_id="machine_selection",
|
||||
data_schema=vol.Schema(
|
||||
{vol.Optional(CONF_HOST): cv.string}
|
||||
),
|
||||
)
|
||||
# store discovered connection address
|
||||
if CONF_MAC in self._discovered:
|
||||
self._config[CONF_MAC] = self._discovered[CONF_MAC]
|
||||
if CONF_ADDRESS in self._discovered:
|
||||
self._config[CONF_ADDRESS] = self._discovered[CONF_ADDRESS]
|
||||
|
||||
return await self.async_step_machine_selection(
|
||||
user_input={CONF_MACHINE: self._discovered[CONF_MACHINE]}
|
||||
)
|
||||
if not errors:
|
||||
self._config = data
|
||||
return await self.async_step_machine_selection()
|
||||
|
||||
placeholders: dict[str, str] | None = None
|
||||
@ -175,18 +164,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
serial_number = self._discovered[CONF_MACHINE]
|
||||
|
||||
selected_device = self._fleet[serial_number]
|
||||
|
||||
# validate local connection if host is provided
|
||||
if user_input.get(CONF_HOST):
|
||||
if not await LaMarzoccoLocalClient.validate_connection(
|
||||
client=self._client,
|
||||
host=user_input[CONF_HOST],
|
||||
token=selected_device.communication_key,
|
||||
):
|
||||
errors[CONF_HOST] = "cannot_connect"
|
||||
else:
|
||||
self._config[CONF_HOST] = user_input[CONF_HOST]
|
||||
selected_device = self._things[serial_number]
|
||||
|
||||
if not errors:
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
@ -200,18 +178,16 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=selected_device.name,
|
||||
data={
|
||||
**self._config,
|
||||
CONF_NAME: selected_device.name,
|
||||
CONF_MODEL: selected_device.model,
|
||||
CONF_TOKEN: selected_device.communication_key,
|
||||
CONF_TOKEN: self._things[serial_number].ble_auth_token,
|
||||
},
|
||||
)
|
||||
|
||||
machine_options = [
|
||||
SelectOptionDict(
|
||||
value=device.serial_number,
|
||||
label=f"{device.model} ({device.serial_number})",
|
||||
value=thing.serial_number,
|
||||
label=f"{thing.name} ({thing.serial_number})",
|
||||
)
|
||||
for device in self._fleet.values()
|
||||
for thing in self._things.values()
|
||||
]
|
||||
|
||||
machine_selection_schema = vol.Schema(
|
||||
@ -224,7 +200,6 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_HOST): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
@ -304,7 +279,6 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
await self.async_set_unique_id(serial)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={
|
||||
CONF_HOST: discovery_info.ip,
|
||||
CONF_ADDRESS: discovery_info.macaddress,
|
||||
}
|
||||
)
|
||||
@ -316,8 +290,8 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
discovery_info.ip,
|
||||
)
|
||||
|
||||
self._discovered[CONF_NAME] = discovery_info.hostname
|
||||
self._discovered[CONF_MACHINE] = serial
|
||||
self._discovered[CONF_HOST] = discovery_info.ip
|
||||
self._discovered[CONF_ADDRESS] = discovery_info.macaddress
|
||||
|
||||
return await self.async_step_user()
|
||||
|
@ -3,28 +3,25 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pylamarzocco.clients.local import LaMarzoccoLocalClient
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
FIRMWARE_UPDATE_INTERVAL = timedelta(hours=1)
|
||||
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
SETTINGS_UPDATE_INTERVAL = timedelta(hours=1)
|
||||
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -33,8 +30,8 @@ class LaMarzoccoRuntimeData:
|
||||
"""Runtime data for La Marzocco."""
|
||||
|
||||
config_coordinator: LaMarzoccoConfigUpdateCoordinator
|
||||
firmware_coordinator: LaMarzoccoFirmwareUpdateCoordinator
|
||||
statistics_coordinator: LaMarzoccoStatisticsUpdateCoordinator
|
||||
settings_coordinator: LaMarzoccoSettingsUpdateCoordinator
|
||||
schedule_coordinator: LaMarzoccoScheduleUpdateCoordinator
|
||||
|
||||
|
||||
type LaMarzoccoConfigEntry = ConfigEntry[LaMarzoccoRuntimeData]
|
||||
@ -51,7 +48,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
hass: HomeAssistant,
|
||||
entry: LaMarzoccoConfigEntry,
|
||||
device: LaMarzoccoMachine,
|
||||
local_client: LaMarzoccoLocalClient | None = None,
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
@ -62,9 +58,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
update_interval=self._default_update_interval,
|
||||
)
|
||||
self.device = device
|
||||
self.local_connection_configured = local_client is not None
|
||||
self._local_client = local_client
|
||||
self.new_device_callback: list[Callable] = []
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Do the data update."""
|
||||
@ -89,30 +82,22 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Class to handle fetching data from the La Marzocco API centrally."""
|
||||
|
||||
_scale_address: str | None = None
|
||||
|
||||
async def _async_connect_websocket(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
if self._local_client is not None and (
|
||||
self._local_client.websocket is None or self._local_client.websocket.closed
|
||||
):
|
||||
if not self.device.websocket.connected:
|
||||
_LOGGER.debug("Init WebSocket in background task")
|
||||
|
||||
self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self.device.websocket_connect(
|
||||
notify_callback=lambda: self.async_set_updated_data(None)
|
||||
target=self.device.connect_dashboard_websocket(
|
||||
update_callback=lambda _: self.async_set_updated_data(None)
|
||||
),
|
||||
name="lm_websocket_task",
|
||||
)
|
||||
|
||||
async def websocket_close(_: Any | None = None) -> None:
|
||||
if (
|
||||
self._local_client is not None
|
||||
and self._local_client.websocket is not None
|
||||
and not self._local_client.websocket.closed
|
||||
):
|
||||
await self._local_client.websocket.close()
|
||||
if self.device.websocket.connected:
|
||||
await self.device.websocket.disconnect()
|
||||
|
||||
self.config_entry.async_on_unload(
|
||||
self.hass.bus.async_listen_once(
|
||||
@ -123,47 +108,28 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
await self.device.get_config()
|
||||
_LOGGER.debug("Current status: %s", str(self.device.config))
|
||||
await self.device.get_dashboard()
|
||||
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
|
||||
await self._async_connect_websocket()
|
||||
self._async_add_remove_scale()
|
||||
|
||||
@callback
|
||||
def _async_add_remove_scale(self) -> None:
|
||||
"""Add or remove a scale when added or removed."""
|
||||
if self.device.config.scale and not self._scale_address:
|
||||
self._scale_address = self.device.config.scale.address
|
||||
for scale_callback in self.new_device_callback:
|
||||
scale_callback()
|
||||
elif not self.device.config.scale and self._scale_address:
|
||||
device_registry = dr.async_get(self.hass)
|
||||
if device := device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, self._scale_address)}
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
self._scale_address = None
|
||||
|
||||
|
||||
class LaMarzoccoFirmwareUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Coordinator for La Marzocco firmware."""
|
||||
class LaMarzoccoSettingsUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Coordinator for La Marzocco settings."""
|
||||
|
||||
_default_update_interval = FIRMWARE_UPDATE_INTERVAL
|
||||
_default_update_interval = SETTINGS_UPDATE_INTERVAL
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
await self.device.get_firmware()
|
||||
_LOGGER.debug("Current firmware: %s", str(self.device.firmware))
|
||||
await self.device.get_settings()
|
||||
_LOGGER.debug("Current settings: %s", self.device.settings.to_dict())
|
||||
|
||||
|
||||
class LaMarzoccoStatisticsUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Coordinator for La Marzocco statistics."""
|
||||
class LaMarzoccoScheduleUpdateCoordinator(LaMarzoccoUpdateCoordinator):
|
||||
"""Coordinator for La Marzocco schedule."""
|
||||
|
||||
_default_update_interval = STATISTICS_UPDATE_INTERVAL
|
||||
_default_update_interval = SCHEDULE_UPDATE_INTERVAL
|
||||
|
||||
async def _internal_async_update_data(self) -> None:
|
||||
"""Fetch data from API endpoint."""
|
||||
await self.device.get_statistics()
|
||||
_LOGGER.debug("Current statistics: %s", str(self.device.statistics))
|
||||
await self.device.get_schedule()
|
||||
_LOGGER.debug("Current schedule: %s", self.device.schedule.to_dict())
|
||||
|
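The coordinator hunks above all follow one pattern: each subclass keeps its own default update interval and implements only _internal_async_update_data(), fetching a single slice of device state (dashboard, settings, schedule) from pylamarzocco. A rough standalone sketch of the same idea on top of Home Assistant's generic coordinator follows; the class name, the device argument, and the five-minute interval are assumptions for illustration, not taken from this commit.

from datetime import timedelta
import logging
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


class SettingsOnlyCoordinator(DataUpdateCoordinator[None]):
    """Sketch: poll exactly one slice of device state, like the coordinators above."""

    def __init__(self, hass: HomeAssistant, device: Any) -> None:
        super().__init__(
            hass,
            _LOGGER,
            name="lamarzocco_settings_sketch",
            update_interval=timedelta(minutes=5),  # assumed slow-polling interval
        )
        self.device = device

    async def _async_update_data(self) -> None:
        # mirrors LaMarzoccoSettingsUpdateCoordinator._internal_async_update_data above
        await self.device.get_settings()
        _LOGGER.debug("Current settings: %s", self.device.settings.to_dict())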
@ -2,10 +2,7 @@

from __future__ import annotations

from dataclasses import asdict
from typing import Any, TypedDict

from pylamarzocco.const import FirmwareType
from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
@ -17,15 +14,6 @@ TO_REDACT = {
}


class DiagnosticsData(TypedDict):
"""Diagnostic data for La Marzocco."""

model: str
config: dict[str, Any]
firmware: list[dict[FirmwareType, dict[str, Any]]]
statistics: dict[str, Any]


async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: LaMarzoccoConfigEntry,
@ -33,12 +21,4 @@ async def async_get_config_entry_diagnostics(
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data.config_coordinator
device = coordinator.device
# collect all data sources
diagnostics_data = DiagnosticsData(
model=device.model,
config=asdict(device.config),
firmware=[{key: asdict(firmware)} for key, firmware in device.firmware.items()],
statistics=asdict(device.statistics),
)

return async_redact_data(diagnostics_data, TO_REDACT)
return async_redact_data(device.to_dict(), TO_REDACT)
|
@ -2,10 +2,9 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco.const import FirmwareType
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_MAC
|
||||
from homeassistant.helpers.device_registry import (
|
||||
@ -46,12 +45,12 @@ class LaMarzoccoBaseEntity(
|
||||
self._attr_unique_id = f"{device.serial_number}_{key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.serial_number)},
|
||||
name=device.name,
|
||||
name=device.dashboard.name,
|
||||
manufacturer="La Marzocco",
|
||||
model=device.full_model_name,
|
||||
model_id=device.model,
|
||||
model=device.dashboard.model_name.value,
|
||||
model_id=device.dashboard.model_code.value,
|
||||
serial_number=device.serial_number,
|
||||
sw_version=device.firmware[FirmwareType.MACHINE].current_version,
|
||||
sw_version=device.settings.firmwares[FirmwareType.MACHINE].build_version,
|
||||
)
|
||||
connections: set[tuple[str, str]] = set()
|
||||
if coordinator.config_entry.data.get(CONF_ADDRESS):
|
||||
@ -86,26 +85,3 @@ class LaMarzoccoEntity(LaMarzoccoBaseEntity):
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
|
||||
class LaMarzoccScaleEntity(LaMarzoccoEntity):
|
||||
"""Common class for scale."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LaMarzoccoUpdateCoordinator,
|
||||
entity_description: LaMarzoccoEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator, entity_description)
|
||||
scale = coordinator.device.config.scale
|
||||
if TYPE_CHECKING:
|
||||
assert scale
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, scale.address)},
|
||||
name=scale.name,
|
||||
manufacturer="Acaia",
|
||||
model="Lunar",
|
||||
model_id="Y.301",
|
||||
via_device=(DOMAIN, coordinator.device.serial_number),
|
||||
)
|
||||
|
@ -34,36 +34,11 @@
|
||||
"dose": {
|
||||
"default": "mdi:cup-water"
|
||||
},
|
||||
"prebrew_off": {
|
||||
"default": "mdi:water-off"
|
||||
},
|
||||
"prebrew_on": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
"preinfusion_off": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
"scale_target": {
|
||||
"default": "mdi:scale-balance"
|
||||
},
|
||||
"smart_standby_time": {
|
||||
"default": "mdi:timer"
|
||||
},
|
||||
"steam_temp": {
|
||||
"default": "mdi:thermometer-water"
|
||||
},
|
||||
"tea_water_duration": {
|
||||
"default": "mdi:timer-sand"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"active_bbw": {
|
||||
"default": "mdi:alpha-u",
|
||||
"state": {
|
||||
"a": "mdi:alpha-a",
|
||||
"b": "mdi:alpha-b"
|
||||
}
|
||||
},
|
||||
"smart_standby_mode": {
|
||||
"default": "mdi:power",
|
||||
"state": {
|
||||
@ -88,26 +63,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"drink_stats_coffee": {
|
||||
"default": "mdi:chart-line"
|
||||
},
|
||||
"drink_stats_flushing": {
|
||||
"default": "mdi:chart-line"
|
||||
},
|
||||
"drink_stats_coffee_key": {
|
||||
"default": "mdi:chart-scatter-plot"
|
||||
},
|
||||
"shot_timer": {
|
||||
"default": "mdi:timer"
|
||||
},
|
||||
"current_temp_coffee": {
|
||||
"default": "mdi:thermometer"
|
||||
},
|
||||
"current_temp_steam": {
|
||||
"default": "mdi:thermometer"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"main": {
|
||||
"default": "mdi:power",
|
||||
|
@ -34,8 +34,8 @@
],
"documentation": "https://www.home-assistant.io/integrations/lamarzocco",
"integration_type": "device",
"iot_class": "cloud_polling",
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==1.4.9"]
"requirements": ["pylamarzocco==2.0.0b1"]
}
|
@ -2,18 +2,12 @@
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from pylamarzocco.const import (
|
||||
KEYS_PER_MODEL,
|
||||
BoilerType,
|
||||
MachineModel,
|
||||
PhysicalKey,
|
||||
PrebrewMode,
|
||||
)
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco.const import WidgetType
|
||||
from pylamarzocco.exceptions import RequestNotSuccessful
|
||||
from pylamarzocco.models import LaMarzoccoMachineConfig
|
||||
from pylamarzocco.models import CoffeeBoiler
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
@ -32,8 +26,8 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity
|
||||
from .coordinator import LaMarzoccoConfigEntry
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
@ -45,25 +39,10 @@ class LaMarzoccoNumberEntityDescription(
|
||||
):
|
||||
"""Description of a La Marzocco number entity."""
|
||||
|
||||
native_value_fn: Callable[[LaMarzoccoMachineConfig], float | int]
|
||||
native_value_fn: Callable[[LaMarzoccoMachine], float | int]
|
||||
set_value_fn: Callable[[LaMarzoccoMachine, float | int], Coroutine[Any, Any, bool]]
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class LaMarzoccoKeyNumberEntityDescription(
|
||||
LaMarzoccoEntityDescription,
|
||||
NumberEntityDescription,
|
||||
):
|
||||
"""Description of an La Marzocco number entity with keys."""
|
||||
|
||||
native_value_fn: Callable[
|
||||
[LaMarzoccoMachineConfig, PhysicalKey], float | int | None
|
||||
]
|
||||
set_value_fn: Callable[
|
||||
[LaMarzoccoMachine, float | int, PhysicalKey], Coroutine[Any, Any, bool]
|
||||
]
|
||||
|
||||
|
||||
ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
key="coffee_temp",
|
||||
@ -73,43 +52,11 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
|
||||
native_step=PRECISION_TENTHS,
|
||||
native_min_value=85,
|
||||
native_max_value=104,
|
||||
set_value_fn=lambda machine, temp: machine.set_temp(BoilerType.COFFEE, temp),
|
||||
native_value_fn=lambda config: config.boilers[
|
||||
BoilerType.COFFEE
|
||||
].target_temperature,
|
||||
),
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
key="steam_temp",
|
||||
translation_key="steam_temp",
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_min_value=126,
|
||||
native_max_value=131,
|
||||
set_value_fn=lambda machine, temp: machine.set_temp(BoilerType.STEAM, temp),
|
||||
native_value_fn=lambda config: config.boilers[
|
||||
BoilerType.STEAM
|
||||
].target_temperature,
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
in (
|
||||
MachineModel.GS3_AV,
|
||||
MachineModel.GS3_MP,
|
||||
),
|
||||
),
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
key="tea_water_duration",
|
||||
translation_key="tea_water_duration",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_min_value=0,
|
||||
native_max_value=30,
|
||||
set_value_fn=lambda machine, value: machine.set_dose_tea_water(int(value)),
|
||||
native_value_fn=lambda config: config.dose_hot_water,
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
in (
|
||||
MachineModel.GS3_AV,
|
||||
MachineModel.GS3_MP,
|
||||
set_value_fn=lambda machine, temp: machine.set_coffee_target_temperature(temp),
|
||||
native_value_fn=(
|
||||
lambda machine: cast(
|
||||
CoffeeBoiler, machine.dashboard.config[WidgetType.CM_COFFEE_BOILER]
|
||||
).target_temperature
|
||||
),
|
||||
),
|
||||
LaMarzoccoNumberEntityDescription(
|
||||
@ -117,119 +64,18 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
|
||||
translation_key="smart_standby_time",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
native_step=10,
|
||||
native_min_value=10,
|
||||
native_max_value=240,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, value: machine.set_smart_standby(
|
||||
enabled=machine.config.smart_standby.enabled,
|
||||
mode=machine.config.smart_standby.mode,
|
||||
minutes=int(value),
|
||||
),
|
||||
native_value_fn=lambda config: config.smart_standby.minutes,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = (
|
||||
LaMarzoccoKeyNumberEntityDescription(
|
||||
key="prebrew_off",
|
||||
translation_key="prebrew_off",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_step=PRECISION_TENTHS,
|
||||
native_min_value=1,
|
||||
native_max_value=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, value, key: machine.set_prebrew_time(
|
||||
prebrew_off_time=value, key=key
|
||||
),
|
||||
native_value_fn=lambda config, key: config.prebrew_configuration[key][
|
||||
0
|
||||
].off_time,
|
||||
available_fn=lambda device: len(device.config.prebrew_configuration) > 0
|
||||
and device.config.prebrew_mode
|
||||
in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED),
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
!= MachineModel.GS3_MP,
|
||||
),
|
||||
LaMarzoccoKeyNumberEntityDescription(
|
||||
key="prebrew_on",
|
||||
translation_key="prebrew_on",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_step=PRECISION_TENTHS,
|
||||
native_min_value=2,
|
||||
native_max_value=10,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, value, key: machine.set_prebrew_time(
|
||||
prebrew_on_time=value, key=key
|
||||
),
|
||||
native_value_fn=lambda config, key: config.prebrew_configuration[key][
|
||||
0
|
||||
].off_time,
|
||||
available_fn=lambda device: len(device.config.prebrew_configuration) > 0
|
||||
and device.config.prebrew_mode
|
||||
in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED),
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
!= MachineModel.GS3_MP,
|
||||
),
|
||||
LaMarzoccoKeyNumberEntityDescription(
|
||||
key="preinfusion_off",
|
||||
translation_key="preinfusion_off",
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_step=PRECISION_TENTHS,
|
||||
native_min_value=2,
|
||||
native_max_value=29,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, value, key: machine.set_preinfusion_time(
|
||||
preinfusion_time=value, key=key
|
||||
),
|
||||
native_value_fn=lambda config, key: config.prebrew_configuration[key][
|
||||
1
|
||||
].preinfusion_time,
|
||||
available_fn=lambda device: len(device.config.prebrew_configuration) > 0
|
||||
and device.config.prebrew_mode == PrebrewMode.PREINFUSION,
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
!= MachineModel.GS3_MP,
|
||||
),
|
||||
LaMarzoccoKeyNumberEntityDescription(
|
||||
key="dose",
|
||||
translation_key="dose",
|
||||
native_unit_of_measurement="ticks",
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_min_value=0,
|
||||
native_max_value=999,
|
||||
native_max_value=240,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, ticks, key: machine.set_dose(
|
||||
dose=int(ticks), key=key
|
||||
),
|
||||
native_value_fn=lambda config, key: config.doses[key],
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
== MachineModel.GS3_AV,
|
||||
),
|
||||
)
|
||||
|
||||
SCALE_KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = (
|
||||
LaMarzoccoKeyNumberEntityDescription(
|
||||
key="scale_target",
|
||||
translation_key="scale_target",
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_min_value=1,
|
||||
native_max_value=100,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
set_value_fn=lambda machine, weight, key: machine.set_bbw_recipe_target(
|
||||
key, int(weight)
|
||||
),
|
||||
native_value_fn=lambda config, key: (
|
||||
config.bbw_settings.doses[key] if config.bbw_settings else None
|
||||
),
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.model
|
||||
in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R)
|
||||
and coordinator.device.config.scale is not None
|
||||
set_value_fn=(
|
||||
lambda machine, value: machine.set_smart_standby(
|
||||
enabled=machine.schedule.smart_wake_up_sleep.smart_stand_by_enabled,
|
||||
mode=machine.schedule.smart_wake_up_sleep.smart_stand_by_after,
|
||||
minutes=int(value),
|
||||
)
|
||||
),
|
||||
native_value_fn=lambda machine: machine.schedule.smart_wake_up_sleep.smart_stand_by_minutes,
|
||||
),
|
||||
)
|
||||
|
||||
@ -247,34 +93,6 @@ async def async_setup_entry(
|
||||
if description.supported_fn(coordinator)
|
||||
]
|
||||
|
||||
for description in KEY_ENTITIES:
|
||||
if description.supported_fn(coordinator):
|
||||
num_keys = KEYS_PER_MODEL[MachineModel(coordinator.device.model)]
|
||||
entities.extend(
|
||||
LaMarzoccoKeyNumberEntity(coordinator, description, key)
|
||||
for key in range(min(num_keys, 1), num_keys + 1)
|
||||
)
|
||||
|
||||
for description in SCALE_KEY_ENTITIES:
|
||||
if description.supported_fn(coordinator):
|
||||
if bbw_settings := coordinator.device.config.bbw_settings:
|
||||
entities.extend(
|
||||
LaMarzoccoScaleTargetNumberEntity(
|
||||
coordinator, description, int(key)
|
||||
)
|
||||
for key in bbw_settings.doses
|
||||
)
|
||||
|
||||
def _async_add_new_scale() -> None:
|
||||
if bbw_settings := coordinator.device.config.bbw_settings:
|
||||
async_add_entities(
|
||||
LaMarzoccoScaleTargetNumberEntity(coordinator, description, int(key))
|
||||
for description in SCALE_KEY_ENTITIES
|
||||
for key in bbw_settings.doses
|
||||
)
|
||||
|
||||
coordinator.new_device_callback.append(_async_add_new_scale)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
@ -286,7 +104,7 @@ class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity):
|
||||
@property
|
||||
def native_value(self) -> float:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.native_value_fn(self.coordinator.device.config)
|
||||
return self.entity_description.native_value_fn(self.coordinator.device)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the value."""
|
||||
@ -305,62 +123,3 @@ class LaMarzoccoNumberEntity(LaMarzoccoEntity, NumberEntity):
|
||||
},
|
||||
) from exc
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class LaMarzoccoKeyNumberEntity(LaMarzoccoEntity, NumberEntity):
|
||||
"""Number representing espresso machine with key support."""
|
||||
|
||||
entity_description: LaMarzoccoKeyNumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LaMarzoccoUpdateCoordinator,
|
||||
description: LaMarzoccoKeyNumberEntityDescription,
|
||||
pyhsical_key: int,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator, description)
|
||||
|
||||
# Physical Key on the machine the entity represents.
|
||||
if pyhsical_key == 0:
|
||||
pyhsical_key = 1
|
||||
else:
|
||||
self._attr_translation_key = f"{description.translation_key}_key"
|
||||
self._attr_translation_placeholders = {"key": str(pyhsical_key)}
|
||||
self._attr_unique_id = f"{super()._attr_unique_id}_key{pyhsical_key}"
|
||||
self._attr_entity_registry_enabled_default = False
|
||||
self.pyhsical_key = pyhsical_key
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.native_value_fn(
|
||||
self.coordinator.device.config, PhysicalKey(self.pyhsical_key)
|
||||
)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the value."""
|
||||
if value != self.native_value:
|
||||
try:
|
||||
await self.entity_description.set_value_fn(
|
||||
self.coordinator.device, value, PhysicalKey(self.pyhsical_key)
|
||||
)
|
||||
except RequestNotSuccessful as exc:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="number_exception_key",
|
||||
translation_placeholders={
|
||||
"key": self.entity_description.key,
|
||||
"value": str(value),
|
||||
"physical_key": str(self.pyhsical_key),
|
||||
},
|
||||
) from exc
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class LaMarzoccoScaleTargetNumberEntity(
|
||||
LaMarzoccoKeyNumberEntity, LaMarzoccScaleEntity
|
||||
):
|
||||
"""Entity representing a key number on the scale."""
|
||||
|
||||
entity_description: LaMarzoccoKeyNumberEntityDescription
|
||||
|
@ -2,18 +2,18 @@
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from pylamarzocco.const import (
|
||||
MachineModel,
|
||||
PhysicalKey,
|
||||
PrebrewMode,
|
||||
SmartStandbyMode,
|
||||
SteamLevel,
|
||||
ModelName,
|
||||
PreExtractionMode,
|
||||
SmartStandByType,
|
||||
SteamTargetLevel,
|
||||
WidgetType,
|
||||
)
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
from pylamarzocco.devices import LaMarzoccoMachine
|
||||
from pylamarzocco.exceptions import RequestNotSuccessful
|
||||
from pylamarzocco.models import LaMarzoccoMachineConfig
|
||||
from pylamarzocco.models import PreBrewing, SteamBoilerLevel
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
@ -23,30 +23,29 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LaMarzoccoConfigEntry
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
STEAM_LEVEL_HA_TO_LM = {
|
||||
"1": SteamLevel.LEVEL_1,
|
||||
"2": SteamLevel.LEVEL_2,
|
||||
"3": SteamLevel.LEVEL_3,
|
||||
"1": SteamTargetLevel.LEVEL_1,
|
||||
"2": SteamTargetLevel.LEVEL_2,
|
||||
"3": SteamTargetLevel.LEVEL_3,
|
||||
}
|
||||
|
||||
STEAM_LEVEL_LM_TO_HA = {value: key for key, value in STEAM_LEVEL_HA_TO_LM.items()}
|
||||
|
||||
PREBREW_MODE_HA_TO_LM = {
|
||||
"disabled": PrebrewMode.DISABLED,
|
||||
"prebrew": PrebrewMode.PREBREW,
|
||||
"prebrew_enabled": PrebrewMode.PREBREW_ENABLED,
|
||||
"preinfusion": PrebrewMode.PREINFUSION,
|
||||
"disabled": PreExtractionMode.DISABLED,
|
||||
"prebrew": PreExtractionMode.PREBREWING,
|
||||
"preinfusion": PreExtractionMode.PREINFUSION,
|
||||
}
|
||||
|
||||
PREBREW_MODE_LM_TO_HA = {value: key for key, value in PREBREW_MODE_HA_TO_LM.items()}
|
||||
|
||||
STANDBY_MODE_HA_TO_LM = {
|
||||
"power_on": SmartStandbyMode.POWER_ON,
|
||||
"last_brewing": SmartStandbyMode.LAST_BREWING,
|
||||
"power_on": SmartStandByType.POWER_ON,
|
||||
"last_brewing": SmartStandByType.LAST_BREW,
|
||||
}
|
||||
|
||||
STANDBY_MODE_LM_TO_HA = {value: key for key, value in STANDBY_MODE_HA_TO_LM.items()}
|
||||
@ -59,7 +58,7 @@ class LaMarzoccoSelectEntityDescription(
|
||||
):
|
||||
"""Description of a La Marzocco select entity."""
|
||||
|
||||
current_option_fn: Callable[[LaMarzoccoMachineConfig], str | None]
|
||||
current_option_fn: Callable[[LaMarzoccoMachine], str | None]
|
||||
select_option_fn: Callable[[LaMarzoccoMachine, str], Coroutine[Any, Any, bool]]
|
||||
|
||||
|
||||
@ -71,25 +70,36 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
|
||||
select_option_fn=lambda machine, option: machine.set_steam_level(
|
||||
STEAM_LEVEL_HA_TO_LM[option]
|
||||
),
|
||||
current_option_fn=lambda config: STEAM_LEVEL_LM_TO_HA[config.steam_level],
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
== MachineModel.LINEA_MICRA,
|
||||
current_option_fn=lambda machine: STEAM_LEVEL_LM_TO_HA[
|
||||
cast(
|
||||
SteamBoilerLevel,
|
||||
machine.dashboard.config[WidgetType.CM_STEAM_BOILER_LEVEL],
|
||||
).target_level
|
||||
],
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.dashboard.model_name
|
||||
in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
|
||||
),
|
||||
),
|
||||
LaMarzoccoSelectEntityDescription(
|
||||
key="prebrew_infusion_select",
|
||||
translation_key="prebrew_infusion_select",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options=["disabled", "prebrew", "preinfusion"],
|
||||
select_option_fn=lambda machine, option: machine.set_prebrew_mode(
|
||||
select_option_fn=lambda machine, option: machine.set_pre_extraction_mode(
|
||||
PREBREW_MODE_HA_TO_LM[option]
|
||||
),
|
||||
current_option_fn=lambda config: PREBREW_MODE_LM_TO_HA[config.prebrew_mode],
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
in (
|
||||
MachineModel.GS3_AV,
|
||||
MachineModel.LINEA_MICRA,
|
||||
MachineModel.LINEA_MINI,
|
||||
MachineModel.LINEA_MINI_R,
|
||||
current_option_fn=lambda machine: PREBREW_MODE_LM_TO_HA[
|
||||
cast(PreBrewing, machine.dashboard.config[WidgetType.CM_PRE_BREWING]).mode
|
||||
],
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.dashboard.model_name
|
||||
in (
|
||||
ModelName.LINEA_MICRA,
|
||||
ModelName.LINEA_MINI,
|
||||
ModelName.LINEA_MINI_R,
|
||||
ModelName.GS3_AV,
|
||||
)
|
||||
),
|
||||
),
|
||||
LaMarzoccoSelectEntityDescription(
|
||||
@ -98,32 +108,16 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
options=["power_on", "last_brewing"],
|
||||
select_option_fn=lambda machine, option: machine.set_smart_standby(
|
||||
enabled=machine.config.smart_standby.enabled,
|
||||
enabled=machine.schedule.smart_wake_up_sleep.smart_stand_by_enabled,
|
||||
mode=STANDBY_MODE_HA_TO_LM[option],
|
||||
minutes=machine.config.smart_standby.minutes,
|
||||
minutes=machine.schedule.smart_wake_up_sleep.smart_stand_by_minutes,
|
||||
),
|
||||
current_option_fn=lambda config: STANDBY_MODE_LM_TO_HA[
|
||||
config.smart_standby.mode
|
||||
current_option_fn=lambda machine: STANDBY_MODE_LM_TO_HA[
|
||||
machine.schedule.smart_wake_up_sleep.smart_stand_by_after
|
||||
],
|
||||
),
|
||||
)
|
||||
|
||||
SCALE_ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
|
||||
LaMarzoccoSelectEntityDescription(
|
||||
key="active_bbw",
|
||||
translation_key="active_bbw",
|
||||
options=["a", "b"],
|
||||
select_option_fn=lambda machine, option: machine.set_active_bbw_recipe(
|
||||
PhysicalKey[option.upper()]
|
||||
),
|
||||
current_option_fn=lambda config: (
|
||||
config.bbw_settings.active_dose.name.lower()
|
||||
if config.bbw_settings
|
||||
else None
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@ -133,30 +127,11 @@ async def async_setup_entry(
|
||||
"""Set up select entities."""
|
||||
coordinator = entry.runtime_data.config_coordinator
|
||||
|
||||
entities = [
|
||||
async_add_entities(
|
||||
LaMarzoccoSelectEntity(coordinator, description)
|
||||
for description in ENTITIES
|
||||
if description.supported_fn(coordinator)
|
||||
]
|
||||
|
||||
if (
|
||||
coordinator.device.model in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R)
|
||||
and coordinator.device.config.scale
|
||||
):
|
||||
entities.extend(
|
||||
LaMarzoccoScaleSelectEntity(coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
def _async_add_new_scale() -> None:
|
||||
async_add_entities(
|
||||
LaMarzoccoScaleSelectEntity(coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
coordinator.new_device_callback.append(_async_add_new_scale)
|
||||
|
||||
async_add_entities(entities)
|
||||
)
|
||||
|
||||
|
||||
class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity):
|
||||
@ -167,9 +142,7 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity):
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the current selected option."""
|
||||
return str(
|
||||
self.entity_description.current_option_fn(self.coordinator.device.config)
|
||||
)
|
||||
return self.entity_description.current_option_fn(self.coordinator.device)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
@ -188,9 +161,3 @@ class LaMarzoccoSelectEntity(LaMarzoccoEntity, SelectEntity):
|
||||
},
|
||||
) from exc
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class LaMarzoccoScaleSelectEntity(LaMarzoccoSelectEntity, LaMarzoccScaleEntity):
|
||||
"""Select entity for La Marzocco scales."""
|
||||
|
||||
entity_description: LaMarzoccoSelectEntityDescription
|
||||
|
@ -1,226 +0,0 @@
|
||||
"""Sensor platform for La Marzocco espresso machines."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from pylamarzocco.const import KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
EntityCategory,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator
|
||||
from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription, LaMarzoccScaleEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class LaMarzoccoSensorEntityDescription(
|
||||
LaMarzoccoEntityDescription, SensorEntityDescription
|
||||
):
|
||||
"""Description of a La Marzocco sensor."""
|
||||
|
||||
value_fn: Callable[[LaMarzoccoMachine], float | int]
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class LaMarzoccoKeySensorEntityDescription(
|
||||
LaMarzoccoEntityDescription, SensorEntityDescription
|
||||
):
|
||||
"""Description of a keyed La Marzocco sensor."""
|
||||
|
||||
value_fn: Callable[[LaMarzoccoMachine, PhysicalKey], int | None]
|
||||
|
||||
|
||||
ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="shot_timer",
|
||||
translation_key="shot_timer",
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
value_fn=lambda device: device.config.brew_active_duration,
|
||||
available_fn=lambda device: device.websocket_connected,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
supported_fn=lambda coordinator: coordinator.local_connection_configured,
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="current_temp_coffee",
|
||||
translation_key="current_temp_coffee",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda device: device.config.boilers[
|
||||
BoilerType.COFFEE
|
||||
].current_temperature,
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="current_temp_steam",
|
||||
translation_key="current_temp_steam",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
suggested_display_precision=1,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
value_fn=lambda device: device.config.boilers[
|
||||
BoilerType.STEAM
|
||||
].current_temperature,
|
||||
supported_fn=lambda coordinator: coordinator.device.model
|
||||
not in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R),
|
||||
),
|
||||
)
|
||||
|
||||
STATISTIC_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="drink_stats_coffee",
|
||||
translation_key="drink_stats_coffee",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda device: device.statistics.total_coffee,
|
||||
available_fn=lambda device: len(device.statistics.drink_stats) > 0,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="drink_stats_flushing",
|
||||
translation_key="drink_stats_flushing",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda device: device.statistics.total_flushes,
|
||||
available_fn=lambda device: len(device.statistics.drink_stats) > 0,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
KEY_STATISTIC_ENTITIES: tuple[LaMarzoccoKeySensorEntityDescription, ...] = (
|
||||
LaMarzoccoKeySensorEntityDescription(
|
||||
key="drink_stats_coffee_key",
|
||||
translation_key="drink_stats_coffee_key",
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
value_fn=lambda device, key: device.statistics.drink_stats.get(key),
|
||||
available_fn=lambda device: len(device.statistics.drink_stats) > 0,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
SCALE_ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="scale_battery",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
value_fn=lambda device: (
|
||||
device.config.scale.battery if device.config.scale else 0
|
||||
),
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.model
|
||||
in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R)
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: LaMarzoccoConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensor entities."""
|
||||
config_coordinator = entry.runtime_data.config_coordinator
|
||||
|
||||
entities: list[LaMarzoccoSensorEntity | LaMarzoccoKeySensorEntity] = []
|
||||
|
||||
entities = [
|
||||
LaMarzoccoSensorEntity(config_coordinator, description)
|
||||
for description in ENTITIES
|
||||
if description.supported_fn(config_coordinator)
|
||||
]
|
||||
|
||||
if (
|
||||
config_coordinator.device.model
|
||||
in (MachineModel.LINEA_MINI, MachineModel.LINEA_MINI_R)
|
||||
and config_coordinator.device.config.scale
|
||||
):
|
||||
entities.extend(
|
||||
LaMarzoccoScaleSensorEntity(config_coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
statistics_coordinator = entry.runtime_data.statistics_coordinator
|
||||
entities.extend(
|
||||
LaMarzoccoSensorEntity(statistics_coordinator, description)
|
||||
for description in STATISTIC_ENTITIES
|
||||
if description.supported_fn(statistics_coordinator)
|
||||
)
|
||||
|
||||
num_keys = KEYS_PER_MODEL[MachineModel(config_coordinator.device.model)]
|
||||
if num_keys > 0:
|
||||
entities.extend(
|
||||
LaMarzoccoKeySensorEntity(statistics_coordinator, description, key)
|
||||
for description in KEY_STATISTIC_ENTITIES
|
||||
for key in range(1, num_keys + 1)
|
||||
)
|
||||
|
||||
def _async_add_new_scale() -> None:
|
||||
async_add_entities(
|
||||
LaMarzoccoScaleSensorEntity(config_coordinator, description)
|
||||
for description in SCALE_ENTITIES
|
||||
)
|
||||
|
||||
config_coordinator.new_device_callback.append(_async_add_new_scale)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class LaMarzoccoSensorEntity(LaMarzoccoEntity, SensorEntity):
|
||||
"""Sensor representing espresso machine temperature data."""
|
||||
|
||||
entity_description: LaMarzoccoSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> int | float | None:
|
||||
"""State of the sensor."""
|
||||
return self.entity_description.value_fn(self.coordinator.device)
|
||||
|
||||
|
||||
class LaMarzoccoKeySensorEntity(LaMarzoccoEntity, SensorEntity):
|
||||
"""Sensor for a La Marzocco key."""
|
||||
|
||||
entity_description: LaMarzoccoKeySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LaMarzoccoUpdateCoordinator,
|
||||
description: LaMarzoccoKeySensorEntityDescription,
|
||||
key: int,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, description)
|
||||
self.key = key
|
||||
self._attr_translation_placeholders = {"key": str(key)}
|
||||
self._attr_unique_id = f"{super()._attr_unique_id}_key{key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> int | None:
|
||||
"""State of the sensor."""
|
||||
return self.entity_description.value_fn(
|
||||
self.coordinator.device, PhysicalKey(self.key)
|
||||
)
|
||||
|
||||
|
||||
class LaMarzoccoScaleSensorEntity(LaMarzoccoSensorEntity, LaMarzoccScaleEntity):
|
||||
"""Sensor for a La Marzocco scale."""
|
||||
|
||||
entity_description: LaMarzoccoSensorEntityDescription
|
@ -32,13 +32,11 @@
|
||||
}
|
||||
},
|
||||
"machine_selection": {
|
||||
"description": "Select the machine you want to integrate. Set the \"IP\" to get access to shot time related sensors.",
|
||||
"description": "Select the machine you want to integrate.",
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::ip%]",
|
||||
"machine": "Machine"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "Local IP address of the machine",
|
||||
"machine": "Select the machine you want to integrate"
|
||||
}
|
||||
},
|
||||
@ -101,54 +99,16 @@
|
||||
"coffee_temp": {
|
||||
"name": "Coffee target temperature"
|
||||
},
|
||||
"dose_key": {
|
||||
"name": "Dose Key {key}"
|
||||
},
|
||||
"prebrew_on": {
|
||||
"name": "Prebrew on time"
|
||||
},
|
||||
"prebrew_on_key": {
|
||||
"name": "Prebrew on time Key {key}"
|
||||
},
|
||||
"prebrew_off": {
|
||||
"name": "Prebrew off time"
|
||||
},
|
||||
"prebrew_off_key": {
|
||||
"name": "Prebrew off time Key {key}"
|
||||
},
|
||||
"preinfusion_off": {
|
||||
"name": "Preinfusion time"
|
||||
},
|
||||
"preinfusion_off_key": {
|
||||
"name": "Preinfusion time Key {key}"
|
||||
},
|
||||
"scale_target_key": {
|
||||
"name": "Brew by weight target {key}"
|
||||
},
|
||||
"smart_standby_time": {
|
||||
"name": "Smart standby time"
|
||||
},
|
||||
"steam_temp": {
|
||||
"name": "Steam target temperature"
|
||||
},
|
||||
"tea_water_duration": {
|
||||
"name": "Tea water duration"
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"active_bbw": {
|
||||
"name": "Active brew by weight recipe",
|
||||
"state": {
|
||||
"a": "Recipe A",
|
||||
"b": "Recipe B"
|
||||
}
|
||||
},
|
||||
"prebrew_infusion_select": {
|
||||
"name": "Prebrew/-infusion mode",
|
||||
"state": {
|
||||
"disabled": "[%key:common::state::disabled%]",
|
||||
"prebrew": "Prebrew",
|
||||
"prebrew_enabled": "Prebrew",
|
||||
"preinfusion": "Preinfusion"
|
||||
}
|
||||
},
|
||||
@ -168,29 +128,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"current_temp_coffee": {
|
||||
"name": "Current coffee temperature"
|
||||
},
|
||||
"current_temp_steam": {
|
||||
"name": "Current steam temperature"
|
||||
},
|
||||
"drink_stats_coffee": {
|
||||
"name": "Total coffees made",
|
||||
"unit_of_measurement": "coffees"
|
||||
},
|
||||
"drink_stats_coffee_key": {
|
||||
"name": "Coffees made Key {key}",
|
||||
"unit_of_measurement": "coffees"
|
||||
},
|
||||
"drink_stats_flushing": {
|
||||
"name": "Total flushes made",
|
||||
"unit_of_measurement": "flushes"
|
||||
},
|
||||
"shot_timer": {
|
||||
"name": "Shot timer"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"auto_on_off": {
|
||||
"name": "Auto on/off ({id})"
|
||||
@ -233,9 +170,6 @@
|
||||
"number_exception": {
|
||||
"message": "Error while setting value {value} for number {key}"
|
||||
},
|
||||
"number_exception_key": {
|
||||
"message": "Error while setting value {value} for number {key}, key {physical_key}"
|
||||
},
|
||||
"select_option_error": {
|
||||
"message": "Error while setting select option {option} for {key}"
|
||||
},
|
||||
|
@ -2,12 +2,17 @@
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
from pylamarzocco.const import BoilerType
|
||||
from pylamarzocco.devices.machine import LaMarzoccoMachine
|
||||
from pylamarzocco import LaMarzoccoMachine
|
||||
from pylamarzocco.const import MachineMode, ModelName, WidgetType
|
||||
from pylamarzocco.exceptions import RequestNotSuccessful
|
||||
from pylamarzocco.models import LaMarzoccoMachineConfig
|
||||
from pylamarzocco.models import (
|
||||
MachineStatus,
|
||||
SteamBoilerLevel,
|
||||
SteamBoilerTemperature,
|
||||
WakeUpScheduleSettings,
|
||||
)
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
@ -30,7 +35,7 @@ class LaMarzoccoSwitchEntityDescription(
|
||||
"""Description of a La Marzocco Switch."""
|
||||
|
||||
control_fn: Callable[[LaMarzoccoMachine, bool], Coroutine[Any, Any, bool]]
|
||||
is_on_fn: Callable[[LaMarzoccoMachineConfig], bool]
|
||||
is_on_fn: Callable[[LaMarzoccoMachine], bool]
|
||||
|
||||
|
||||
ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
|
||||
@ -39,13 +44,42 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
|
||||
translation_key="main",
|
||||
name=None,
|
||||
control_fn=lambda machine, state: machine.set_power(state),
|
||||
is_on_fn=lambda config: config.turned_on,
|
||||
is_on_fn=(
|
||||
lambda machine: cast(
|
||||
MachineStatus, machine.dashboard.config[WidgetType.CM_MACHINE_STATUS]
|
||||
).mode
|
||||
is MachineMode.BREWING_MODE
|
||||
),
|
||||
),
|
||||
LaMarzoccoSwitchEntityDescription(
|
||||
key="steam_boiler_enable",
|
||||
translation_key="steam_boiler",
|
||||
control_fn=lambda machine, state: machine.set_steam(state),
|
||||
is_on_fn=lambda config: config.boilers[BoilerType.STEAM].enabled,
|
||||
is_on_fn=(
|
||||
lambda machine: cast(
|
||||
SteamBoilerLevel,
|
||||
machine.dashboard.config[WidgetType.CM_STEAM_BOILER_LEVEL],
|
||||
).enabled
|
||||
),
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.dashboard.model_name
|
||||
in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
|
||||
),
|
||||
),
|
||||
LaMarzoccoSwitchEntityDescription(
|
||||
key="steam_boiler_enable",
|
||||
translation_key="steam_boiler",
|
||||
control_fn=lambda machine, state: machine.set_steam(state),
|
||||
is_on_fn=(
|
||||
lambda machine: cast(
|
||||
SteamBoilerTemperature,
|
||||
machine.dashboard.config[WidgetType.CM_STEAM_BOILER_TEMPERATURE],
|
||||
).enabled
|
||||
),
|
||||
supported_fn=(
|
||||
lambda coordinator: coordinator.device.dashboard.model_name
|
||||
not in (ModelName.LINEA_MINI_R, ModelName.LINEA_MICRA)
|
||||
),
|
||||
),
|
||||
LaMarzoccoSwitchEntityDescription(
|
||||
key="smart_standby_enabled",
|
||||
@ -53,10 +87,10 @@ ENTITIES: tuple[LaMarzoccoSwitchEntityDescription, ...] = (
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
control_fn=lambda machine, state: machine.set_smart_standby(
|
||||
enabled=state,
|
||||
mode=machine.config.smart_standby.mode,
|
||||
minutes=machine.config.smart_standby.minutes,
|
||||
mode=machine.schedule.smart_wake_up_sleep.smart_stand_by_after,
|
||||
minutes=machine.schedule.smart_wake_up_sleep.smart_stand_by_minutes,
|
||||
),
|
||||
is_on_fn=lambda config: config.smart_standby.enabled,
|
||||
is_on_fn=lambda machine: machine.schedule.smart_wake_up_sleep.smart_stand_by_enabled,
|
||||
),
|
||||
)
|
||||
|
||||
@ -78,8 +112,8 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
entities.extend(
|
||||
LaMarzoccoAutoOnOffSwitchEntity(coordinator, wake_up_sleep_entry_id)
|
||||
for wake_up_sleep_entry_id in coordinator.device.config.wake_up_sleep_entries
|
||||
LaMarzoccoAutoOnOffSwitchEntity(coordinator, wake_up_sleep_entry)
|
||||
for wake_up_sleep_entry in coordinator.device.schedule.smart_wake_up_sleep.schedules
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
@ -117,7 +151,7 @@ class LaMarzoccoSwitchEntity(LaMarzoccoEntity, SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if device is on."""
|
||||
return self.entity_description.is_on_fn(self.coordinator.device.config)
|
||||
return self.entity_description.is_on_fn(self.coordinator.device)
|
||||
|
||||
|
||||
class LaMarzoccoAutoOnOffSwitchEntity(LaMarzoccoBaseEntity, SwitchEntity):
|
||||
@ -129,22 +163,21 @@ class LaMarzoccoAutoOnOffSwitchEntity(LaMarzoccoBaseEntity, SwitchEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: LaMarzoccoUpdateCoordinator,
|
||||
identifier: str,
|
||||
schedule_entry: WakeUpScheduleSettings,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator, f"auto_on_off_{identifier}")
|
||||
self._identifier = identifier
|
||||
self._attr_translation_placeholders = {"id": identifier}
|
||||
self.entity_category = EntityCategory.CONFIG
|
||||
super().__init__(coordinator, f"auto_on_off_{schedule_entry.identifier}")
|
||||
assert schedule_entry.identifier
|
||||
self._schedule_entry = schedule_entry
|
||||
self._identifier = schedule_entry.identifier
|
||||
self._attr_translation_placeholders = {"id": schedule_entry.identifier}
|
||||
self._attr_entity_category = EntityCategory.CONFIG
|
||||
|
||||
async def _async_enable(self, state: bool) -> None:
|
||||
"""Enable or disable the auto on/off schedule."""
|
||||
wake_up_sleep_entry = self.coordinator.device.config.wake_up_sleep_entries[
|
||||
self._identifier
|
||||
]
|
||||
wake_up_sleep_entry.enabled = state
|
||||
self._schedule_entry.enabled = state
|
||||
try:
|
||||
await self.coordinator.device.set_wake_up_sleep(wake_up_sleep_entry)
|
||||
await self.coordinator.device.set_wakeup_schedule(self._schedule_entry)
|
||||
except RequestNotSuccessful as exc:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
@ -164,6 +197,4 @@ class LaMarzoccoAutoOnOffSwitchEntity(LaMarzoccoBaseEntity, SwitchEntity):
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return self.coordinator.device.config.wake_up_sleep_entries[
|
||||
self._identifier
|
||||
].enabled
|
||||
return self._schedule_entry.enabled
|
||||
|
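The rewritten switch entities above no longer read a flat config object; state now comes from typed dashboard widgets, where machine.dashboard.config is keyed by WidgetType and each entry is cast to the matching pylamarzocco model before its fields are used. A minimal sketch of that access pattern, reusing only names that appear in these hunks (the helper function itself is illustrative, not part of the commit):

from typing import cast

from pylamarzocco import LaMarzoccoMachine
from pylamarzocco.const import MachineMode, WidgetType
from pylamarzocco.models import MachineStatus


def machine_is_brewing(machine: LaMarzoccoMachine) -> bool:
    """Read one dashboard widget the same way the is_on_fn lambdas above do."""
    status = cast(MachineStatus, machine.dashboard.config[WidgetType.CM_MACHINE_STATUS])
    return status.mode is MachineMode.BREWING_MODE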
@ -59,7 +59,7 @@ async def async_setup_entry(
) -> None:
"""Create update entities."""

coordinator = entry.runtime_data.firmware_coordinator
coordinator = entry.runtime_data.settings_coordinator
async_add_entities(
LaMarzoccoUpdateEntity(coordinator, description)
for description in ENTITIES
@ -74,18 +74,20 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
_attr_supported_features = UpdateEntityFeature.INSTALL

@property
def installed_version(self) -> str | None:
def installed_version(self) -> str:
"""Return the current firmware version."""
return self.coordinator.device.firmware[
return self.coordinator.device.settings.firmwares[
self.entity_description.component
].current_version
].build_version

@property
def latest_version(self) -> str:
"""Return the latest firmware version."""
return self.coordinator.device.firmware[
if available_update := self.coordinator.device.settings.firmwares[
self.entity_description.component
].latest_version
].available_update:
return available_update.build_version
return self.installed_version

@property
def release_url(self) -> str | None:
@ -99,9 +101,7 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
self._attr_in_progress = True
self.async_write_ha_state()
try:
success = await self.coordinator.device.update_firmware(
self.entity_description.component
)
await self.coordinator.device.update_firmware()
except RequestNotSuccessful as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
@ -110,13 +110,5 @@ class LaMarzoccoUpdateEntity(LaMarzoccoEntity, UpdateEntity):
"key": self.entity_description.key,
},
) from exc
if not success:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={
"key": self.entity_description.key,
},
)
self._attr_in_progress = False
await self.coordinator.async_request_refresh()
|
@ -251,7 +251,7 @@ DISCOVERY_SCHEMAS = [
MatterDiscoverySchema(
platform=Platform.UPDATE,
entity_description=UpdateEntityDescription(
key="MatterUpdate", device_class=UpdateDeviceClass.FIRMWARE, name=None
key="MatterUpdate", device_class=UpdateDeviceClass.FIRMWARE
),
entity_class=MatterUpdate,
required_attributes=(
|
@ -68,7 +68,12 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
from .browse_media import BrowseMedia, async_process_play_media_url # noqa: F401
|
||||
from .browse_media import ( # noqa: F401
|
||||
BrowseMedia,
|
||||
SearchMedia,
|
||||
SearchMediaQuery,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from .const import ( # noqa: F401
|
||||
_DEPRECATED_MEDIA_CLASS_DIRECTORY,
|
||||
_DEPRECATED_SUPPORT_BROWSE_MEDIA,
|
||||
@ -107,10 +112,12 @@ from .const import ( # noqa: F401
|
||||
ATTR_MEDIA_ENQUEUE,
|
||||
ATTR_MEDIA_EPISODE,
|
||||
ATTR_MEDIA_EXTRA,
|
||||
ATTR_MEDIA_FILTER_CLASSES,
|
||||
ATTR_MEDIA_PLAYLIST,
|
||||
ATTR_MEDIA_POSITION,
|
||||
ATTR_MEDIA_POSITION_UPDATED_AT,
|
||||
ATTR_MEDIA_REPEAT,
|
||||
ATTR_MEDIA_SEARCH_QUERY,
|
||||
ATTR_MEDIA_SEASON,
|
||||
ATTR_MEDIA_SEEK_POSITION,
|
||||
ATTR_MEDIA_SERIES_TITLE,
|
||||
@ -128,6 +135,7 @@ from .const import ( # noqa: F401
|
||||
SERVICE_CLEAR_PLAYLIST,
|
||||
SERVICE_JOIN,
|
||||
SERVICE_PLAY_MEDIA,
|
||||
SERVICE_SEARCH_MEDIA,
|
||||
SERVICE_SELECT_SOUND_MODE,
|
||||
SERVICE_SELECT_SOURCE,
|
||||
SERVICE_UNJOIN,
|
||||
@ -137,7 +145,7 @@ from .const import ( # noqa: F401
|
||||
MediaType,
|
||||
RepeatMode,
|
||||
)
|
||||
from .errors import BrowseError
|
||||
from .errors import BrowseError, SearchError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@ -291,6 +299,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
|
||||
websocket_api.async_register_command(hass, websocket_browse_media)
|
||||
websocket_api.async_register_command(hass, websocket_search_media)
|
||||
hass.http.register_view(MediaPlayerImageView(component))
|
||||
|
||||
await component.async_setup(config)
|
||||
@ -447,6 +456,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"async_browse_media",
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SEARCH_MEDIA,
|
||||
{
|
||||
vol.Optional(ATTR_MEDIA_CONTENT_TYPE): cv.string,
|
||||
vol.Optional(ATTR_MEDIA_CONTENT_ID): cv.string,
|
||||
vol.Required(ATTR_MEDIA_SEARCH_QUERY): cv.string,
|
||||
vol.Optional(ATTR_MEDIA_FILTER_CLASSES): vol.All(
|
||||
cv.ensure_list,
|
||||
[vol.In([m.value for m in MediaClass])],
|
||||
lambda x: {MediaClass(item) for item in x},
|
||||
),
|
||||
},
|
||||
"async_internal_search_media",
|
||||
[MediaPlayerEntityFeature.SEARCH_MEDIA],
|
||||
SupportsResponse.ONLY,
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_SHUFFLE_SET,
|
||||
{vol.Required(ATTR_MEDIA_SHUFFLE): cv.boolean},
|
||||
@ -1157,6 +1182,29 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_internal_search_media(
|
||||
self,
|
||||
search_query: str,
|
||||
media_content_type: MediaType | str | None = None,
|
||||
media_content_id: str | None = None,
|
||||
media_filter_classes: list[MediaClass] | None = None,
|
||||
) -> SearchMedia:
|
||||
return await self.async_search_media(
|
||||
query=SearchMediaQuery(
|
||||
search_query=search_query,
|
||||
media_content_type=media_content_type,
|
||||
media_content_id=media_content_id,
|
||||
media_filter_classes=media_filter_classes,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_search_media(
|
||||
self,
|
||||
query: SearchMediaQuery,
|
||||
) -> SearchMedia:
|
||||
"""Search the media player."""
|
||||
raise NotImplementedError
|
||||
|
||||
def join_players(self, group_members: list[str]) -> None:
|
||||
"""Join `group_members` as a player group with the current player."""
|
||||
raise NotImplementedError
|
||||
@ -1360,6 +1408,75 @@ async def websocket_browse_media(
|
||||
connection.send_result(msg["id"], result)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "media_player/search_media",
|
||||
vol.Required("entity_id"): cv.entity_id,
|
||||
vol.Inclusive(
|
||||
ATTR_MEDIA_CONTENT_TYPE,
|
||||
"media_ids",
|
||||
"media_content_type and media_content_id must be provided together",
|
||||
): str,
|
||||
vol.Inclusive(
|
||||
ATTR_MEDIA_CONTENT_ID,
|
||||
"media_ids",
|
||||
"media_content_type and media_content_id must be provided together",
|
||||
): str,
|
||||
vol.Required(ATTR_MEDIA_SEARCH_QUERY): str,
|
||||
vol.Optional(ATTR_MEDIA_FILTER_CLASSES): vol.All(
|
||||
cv.ensure_list,
|
||||
[vol.In([m.value for m in MediaClass])],
|
||||
lambda x: {MediaClass(item) for item in x},
|
||||
),
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_search_media(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.connection.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Search media available to the media_player entity.
|
||||
|
||||
To use, media_player integrations can implement
|
||||
MediaPlayerEntity.async_search_media()
|
||||
"""
|
||||
player = hass.data[DATA_COMPONENT].get_entity(msg["entity_id"])
|
||||
|
||||
if player is None:
|
||||
connection.send_error(msg["id"], "entity_not_found", "Entity not found")
|
||||
return
|
||||
|
||||
if MediaPlayerEntityFeature.SEARCH_MEDIA not in player.supported_features_compat:
|
||||
connection.send_message(
|
||||
websocket_api.error_message(
|
||||
msg["id"], ERR_NOT_SUPPORTED, "Player does not support searching media"
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
media_content_type = msg.get(ATTR_MEDIA_CONTENT_TYPE)
|
||||
media_content_id = msg.get(ATTR_MEDIA_CONTENT_ID)
|
||||
query = str(msg.get(ATTR_MEDIA_SEARCH_QUERY))
|
||||
media_filter_classes = msg.get(ATTR_MEDIA_FILTER_CLASSES, [])
|
||||
|
||||
try:
|
||||
payload = await player.async_internal_search_media(
|
||||
query,
|
||||
media_content_type,
|
||||
media_content_id,
|
||||
media_filter_classes,
|
||||
)
|
||||
except SearchError as err:
|
||||
connection.send_message(
|
||||
websocket_api.error_message(msg["id"], ERR_UNKNOWN_ERROR, str(err))
|
||||
)
|
||||
return
|
||||
|
||||
result = payload.as_dict()
|
||||
connection.send_result(msg["id"], result)
|
||||
|
||||
|
||||
_FETCH_TIMEOUT = aiohttp.ClientTimeout(total=10)
|
||||
|
||||
|
||||
|
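With the registration above, the new search can be reached two ways: the media_player/search_media WebSocket command defined just before this point, or the media_player.search_media entity service, which is response-only. A hedged sketch of the service call from Python follows; the entity id, query, and filter classes are placeholders, not values from this commit.

# illustrative call site, e.g. from a custom integration that already has `hass`
result = await hass.services.async_call(
    "media_player",
    "search_media",
    {
        "entity_id": "media_player.living_room",  # placeholder target
        "search_query": "some artist",  # placeholder query
        "media_filter_classes": ["album", "artist"],  # optional, per the schema above
    },
    blocking=True,
    return_response=True,
)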
@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import Sequence
from dataclasses import dataclass, field
from datetime import timedelta
import logging
from typing import Any
@ -109,6 +110,7 @@ class BrowseMedia:
children_media_class: MediaClass | str | None = None,
thumbnail: str | None = None,
not_shown: int = 0,
can_search: bool = False,
) -> None:
"""Initialize browse media item."""
self.media_class = media_class
@ -121,6 +123,7 @@ class BrowseMedia:
self.children_media_class = children_media_class
self.thumbnail = thumbnail
self.not_shown = not_shown
self.can_search = can_search

def as_dict(self, *, parent: bool = True) -> dict[str, Any]:
"""Convert Media class to browse media dictionary."""
@ -135,6 +138,7 @@ class BrowseMedia:
"children_media_class": self.children_media_class,
"can_play": self.can_play,
"can_expand": self.can_expand,
"can_search": self.can_search,
"thumbnail": self.thumbnail,
}

@ -163,3 +167,27 @@ class BrowseMedia:
def __repr__(self) -> str:
"""Return representation of browse media."""
return f"<BrowseMedia {self.title} ({self.media_class})>"


@dataclass(kw_only=True, frozen=True)
class SearchMedia:
"""Represent search results."""

version: int = field(default=1)
result: list[BrowseMedia]

def as_dict(self, *, parent: bool = True) -> dict[str, Any]:
"""Convert SearchMedia class to browse media dictionary."""
return {
"result": [item.as_dict(parent=parent) for item in self.result],
}


@dataclass(kw_only=True, frozen=True)
class SearchMediaQuery:
"""Represent a search media file."""

search_query: str
media_content_type: MediaType | str | None = field(default=None)
media_content_id: str | None = None
media_filter_classes: list[MediaClass] | None = field(default=None)
|
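Taken together with the SearchMedia and SearchMediaQuery dataclasses above, an integration opts in by setting MediaPlayerEntityFeature.SEARCH_MEDIA and overriding async_search_media(), raising SearchError on failure. A minimal, hypothetical implementation is sketched below; the _backend_search call stands in for whatever lookup a real integration would perform.

from homeassistant.components.media_player import (
    BrowseMedia,
    MediaClass,
    MediaPlayerEntity,
    MediaPlayerEntityFeature,
    SearchMedia,
    SearchMediaQuery,
)
from homeassistant.components.media_player.errors import SearchError


class ExampleSearchPlayer(MediaPlayerEntity):
    """Hypothetical player supporting the new search feature."""

    _attr_supported_features = MediaPlayerEntityFeature.SEARCH_MEDIA

    async def async_search_media(self, query: SearchMediaQuery) -> SearchMedia:
        """Return matching items as browsable media."""
        try:
            # stand-in for an integration-specific backend lookup
            titles = await self._backend_search(query.search_query)
        except OSError as err:
            raise SearchError(f"Search failed: {err}") from err
        return SearchMedia(
            result=[
                BrowseMedia(
                    title=title,
                    media_class=MediaClass.MUSIC,
                    media_content_id=title,
                    media_content_type="music",
                    can_play=True,
                    can_expand=False,
                )
                for title in titles
            ]
        )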
@ -26,6 +26,8 @@ ATTR_MEDIA_ARTIST = "media_artist"
ATTR_MEDIA_CHANNEL = "media_channel"
ATTR_MEDIA_CONTENT_ID = "media_content_id"
ATTR_MEDIA_CONTENT_TYPE = "media_content_type"
ATTR_MEDIA_SEARCH_QUERY = "search_query"
ATTR_MEDIA_FILTER_CLASSES = "media_filter_classes"
ATTR_MEDIA_DURATION = "media_duration"
ATTR_MEDIA_ENQUEUE = "enqueue"
ATTR_MEDIA_EXTRA = "extra"
@ -174,6 +176,7 @@ SERVICE_CLEAR_PLAYLIST = "clear_playlist"
SERVICE_JOIN = "join"
SERVICE_PLAY_MEDIA = "play_media"
SERVICE_BROWSE_MEDIA = "browse_media"
SERVICE_SEARCH_MEDIA = "search_media"
SERVICE_SELECT_SOUND_MODE = "select_sound_mode"
SERVICE_SELECT_SOURCE = "select_source"
SERVICE_UNJOIN = "unjoin"
@ -220,6 +223,7 @@ class MediaPlayerEntityFeature(IntFlag):
GROUPING = 524288
MEDIA_ANNOUNCE = 1048576
MEDIA_ENQUEUE = 2097152
SEARCH_MEDIA = 4194304


# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5.
|
@ -9,3 +9,7 @@ class MediaPlayerException(HomeAssistantError):

class BrowseError(MediaPlayerException):
"""Error while browsing."""


class SearchError(MediaPlayerException):
"""Error while searching."""

@ -74,6 +74,7 @@ CONDITION_CLASSES: dict[str, list[str]] = {
|
||||
"Pluie modérée",
|
||||
"Pluie / Averses",
|
||||
"Averses",
|
||||
"Averses faibles",
|
||||
"Pluie",
|
||||
],
|
||||
ATTR_CONDITION_SNOWY: [
|
||||
@ -81,10 +82,11 @@ CONDITION_CLASSES: dict[str, list[str]] = {
|
||||
"Neige",
|
||||
"Averses de neige",
|
||||
"Neige forte",
|
||||
"Neige faible",
|
||||
"Quelques flocons",
|
||||
],
|
||||
ATTR_CONDITION_SNOWY_RAINY: ["Pluie et neige", "Pluie verglaçante"],
|
||||
ATTR_CONDITION_SUNNY: ["Ensoleillé"],
|
||||
ATTR_CONDITION_SUNNY: ["Ensoleillé", "Ciel clair"],
|
||||
ATTR_CONDITION_WINDY: [],
|
||||
ATTR_CONDITION_WINDY_VARIANT: [],
|
||||
ATTR_CONDITION_EXCEPTIONAL: [],
|
||||
|
80
homeassistant/components/miele/diagnostics.py
Normal file
80
homeassistant/components/miele/diagnostics.py
Normal file
@ -0,0 +1,80 @@
|
||||
"""Diagnostics support for Miele."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
from typing import Any, cast
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
|
||||
from .coordinator import MieleConfigEntry
|
||||
|
||||
TO_REDACT = {"access_token", "refresh_token", "fabNumber"}
|
||||
|
||||
|
||||
def hash_identifier(key: str) -> str:
|
||||
"""Hash the identifier string."""
|
||||
return f"**REDACTED_{hashlib.sha256(key.encode()).hexdigest()[:16]}"
|
||||
|
||||
|
||||
def redact_identifiers(in_data: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Redact identifiers from the data."""
|
||||
for key in in_data:
|
||||
in_data[hash_identifier(key)] = in_data.pop(key)
|
||||
return in_data
|
||||
|
||||
|
||||
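As a quick illustration of what these helpers produce (a sketch with made-up serial numbers, not data from this change):

```python
# Hypothetical device payload keyed by serial number.
devices = {"000123456789": {"state": "running"}, "000987654321": {"state": "idle"}}

redacted = redact_identifiers(devices)
# Keys become stable, non-reversible placeholders, e.g.
# {"**REDACTED_<16 hex chars>": {"state": "running"}, ...}.
# hash_identifier() is deterministic, so the same serial always maps to the
# same placeholder within and across diagnostic dumps.
print(list(redacted))
```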
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: MieleConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
miele_data = {
|
||||
"devices": redact_identifiers(
|
||||
{
|
||||
device_id: device_data.raw
|
||||
for device_id, device_data in config_entry.runtime_data.data.devices.items()
|
||||
}
|
||||
),
|
||||
"actions": redact_identifiers(
|
||||
{
|
||||
device_id: action_data.raw
|
||||
for device_id, action_data in config_entry.runtime_data.data.actions.items()
|
||||
}
|
||||
),
|
||||
}
|
||||
|
||||
return {
|
||||
"config_entry_data": async_redact_data(dict(config_entry.data), TO_REDACT),
|
||||
"miele_data": async_redact_data(miele_data, TO_REDACT),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_device_diagnostics(
|
||||
hass: HomeAssistant, config_entry: MieleConfigEntry, device: DeviceEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a device."""
|
||||
info = {
|
||||
"manufacturer": device.manufacturer,
|
||||
"model": device.model,
|
||||
}
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
|
||||
device_id = cast(str, device.serial_number)
|
||||
miele_data = {
|
||||
"devices": {
|
||||
hash_identifier(device_id): coordinator.data.devices[device_id].raw
|
||||
},
|
||||
"actions": {
|
||||
hash_identifier(device_id): coordinator.data.actions[device_id].raw
|
||||
},
|
||||
"programs": "Not implemented",
|
||||
}
|
||||
return {
|
||||
"info": async_redact_data(info, TO_REDACT),
|
||||
"data": async_redact_data(config_entry.data, TO_REDACT),
|
||||
"miele_data": async_redact_data(miele_data, TO_REDACT),
|
||||
}
|
@ -29,10 +29,10 @@
|
||||
"public_weather": {
|
||||
"data": {
|
||||
"area_name": "Name of the area",
|
||||
"lat_ne": "North-East corner latitude",
|
||||
"lon_ne": "North-East corner longitude",
|
||||
"lat_sw": "South-West corner latitude",
|
||||
"lon_sw": "South-West corner longitude",
|
||||
"lat_ne": "Northeast corner latitude",
|
||||
"lon_ne": "Northeast corner longitude",
|
||||
"lat_sw": "Southwest corner latitude",
|
||||
"lon_sw": "Southwest corner longitude",
|
||||
"mode": "Calculation",
|
||||
"show_on_map": "Show on map"
|
||||
},
|
||||
@ -175,7 +175,7 @@
|
||||
"state": {
|
||||
"frost_guard": "Frost guard",
|
||||
"schedule": "Schedule",
|
||||
"manual": "Manual"
|
||||
"manual": "[%key:common::state::manual%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -206,13 +206,13 @@
|
||||
"name": "Wind direction",
|
||||
"state": {
|
||||
"n": "North",
|
||||
"ne": "North-east",
|
||||
"ne": "Northeast",
|
||||
"e": "East",
|
||||
"se": "South-east",
|
||||
"se": "Southeast",
|
||||
"s": "South",
|
||||
"sw": "South-west",
|
||||
"sw": "Southwest",
|
||||
"w": "West",
|
||||
"nw": "North-west"
|
||||
"nw": "Northwest"
|
||||
}
|
||||
},
|
||||
"wind_angle": {
|
||||
|
@ -63,6 +63,7 @@ from .const import (
|
||||
RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE,
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
UNSUPPORTED_MODELS,
|
||||
WEB_SEARCH_MODELS,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -160,9 +161,10 @@ class OpenAIOptionsFlow(OptionsFlow):
|
||||
errors[CONF_CHAT_MODEL] = "model_not_supported"
|
||||
|
||||
if user_input.get(CONF_WEB_SEARCH):
|
||||
if not user_input.get(
|
||||
CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL
|
||||
).startswith("gpt-4o"):
|
||||
if (
|
||||
user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
not in WEB_SEARCH_MODELS
|
||||
):
|
||||
errors[CONF_WEB_SEARCH] = "web_search_not_supported"
|
||||
elif user_input.get(CONF_WEB_SEARCH_USER_LOCATION):
|
||||
user_input.update(await self.get_location_data())
|
||||
|
@ -41,3 +41,12 @@ UNSUPPORTED_MODELS: list[str] = [
|
||||
"gpt-4o-mini-realtime-preview",
|
||||
"gpt-4o-mini-realtime-preview-2024-12-17",
|
||||
]
|
||||
|
||||
WEB_SEARCH_MODELS: list[str] = [
|
||||
"gpt-4.1",
|
||||
"gpt-4.1-mini",
|
||||
"gpt-4o",
|
||||
"gpt-4o-search-preview",
|
||||
"gpt-4o-mini",
|
||||
"gpt-4o-mini-search-preview",
|
||||
]
|
||||
|
@ -40,7 +40,7 @@
|
||||
},
|
||||
"error": {
|
||||
"model_not_supported": "This model is not supported, please select a different model",
|
||||
"web_search_not_supported": "Web search is only supported for gpt-4o and gpt-4o-mini models"
|
||||
"web_search_not_supported": "Web search is not supported by this model"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
|
@ -104,7 +104,7 @@ class LazyState(State):
|
||||
return self._last_updated_ts
|
||||
|
||||
@cached_property
|
||||
def last_changed_timestamp(self) -> float: # type: ignore[override]
|
||||
def last_changed_timestamp(self) -> float:
|
||||
"""Last changed timestamp."""
|
||||
ts = self._last_changed_ts or self._last_updated_ts
|
||||
if TYPE_CHECKING:
|
||||
@ -112,7 +112,7 @@ class LazyState(State):
|
||||
return ts
|
||||
|
||||
@cached_property
|
||||
def last_reported_timestamp(self) -> float: # type: ignore[override]
|
||||
def last_reported_timestamp(self) -> float:
|
||||
"""Last reported timestamp."""
|
||||
ts = self._last_reported_ts or self._last_updated_ts
|
||||
if TYPE_CHECKING:
|
||||
|
@ -71,11 +71,11 @@ sequence:
|
||||
title: !input dismiss_text
|
||||
- alias: "Awaiting response"
|
||||
wait_for_trigger:
|
||||
- platform: event
|
||||
- trigger: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_confirm }}"
|
||||
- platform: event
|
||||
- trigger: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_dismiss }}"
|
||||
|
@ -209,7 +209,7 @@ KELVIN_MIN_VALUE_COLOR: Final = 3000
BLOCK_WRONG_SLEEP_PERIOD = 21600
BLOCK_EXPECTED_SLEEP_PERIOD = 43200

UPTIME_DEVIATION: Final = 5
UPTIME_DEVIATION: Final = 60

# Time to wait before reloading entry upon device config change
ENTRY_RELOAD_COOLDOWN = 60
@ -200,8 +200,18 @@ def get_device_uptime(uptime: float, last_uptime: datetime | None) -> datetime:

if (
not last_uptime
or abs((delta_uptime - last_uptime).total_seconds()) > UPTIME_DEVIATION
or (diff := abs((delta_uptime - last_uptime).total_seconds()))
> UPTIME_DEVIATION
):
if last_uptime:
LOGGER.debug(
"Time deviation %s > %s: uptime=%s, last_uptime=%s, delta_uptime=%s",
diff,
UPTIME_DEVIATION,
uptime,
last_uptime,
delta_uptime,
)
return delta_uptime

return last_uptime

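The hunk above widens the drift tolerated before the stored boot time is replaced (5 s to 60 s) and logs when that happens. A minimal standalone sketch of the idea, with names invented for illustration:

```python
from datetime import datetime, timedelta, timezone

UPTIME_DEVIATION = 60  # seconds of drift tolerated before accepting a new boot time


def device_boot_time(uptime_s: float, last_boot: datetime | None) -> datetime:
    """Return a stable boot time derived from a device-reported uptime counter."""
    boot = datetime.now(timezone.utc) - timedelta(seconds=uptime_s)
    # Small clock jitter between polls would otherwise yield a "new" boot time on
    # every update; only accept the new value when it drifts noticeably.
    if last_boot is None or abs((boot - last_boot).total_seconds()) > UPTIME_DEVIATION:
        return boot
    return last_boot
```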
@ -354,11 +354,11 @@
|
||||
"robot_cleaner_cleaning_mode": {
|
||||
"name": "Cleaning mode",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"stop": "[%key:common::action::stop%]",
|
||||
"auto": "[%key:common::state::auto%]",
|
||||
"manual": "[%key:common::state::manual%]",
|
||||
"part": "Partial",
|
||||
"repeat": "Repeat",
|
||||
"manual": "Manual",
|
||||
"stop": "[%key:common::action::stop%]",
|
||||
"map": "Map"
|
||||
}
|
||||
},
|
||||
|
@ -22,34 +22,34 @@ from homeassistant.helpers.network import is_internal_request
|
||||
from .const import UNPLAYABLE_TYPES
|
||||
|
||||
LIBRARY = [
|
||||
"Favorites",
|
||||
"Artists",
|
||||
"Albums",
|
||||
"Tracks",
|
||||
"Playlists",
|
||||
"Genres",
|
||||
"New Music",
|
||||
"Album Artists",
|
||||
"Apps",
|
||||
"Radios",
|
||||
"favorites",
|
||||
"artists",
|
||||
"albums",
|
||||
"tracks",
|
||||
"playlists",
|
||||
"genres",
|
||||
"new music",
|
||||
"album artists",
|
||||
"apps",
|
||||
"radios",
|
||||
]
|
||||
|
||||
MEDIA_TYPE_TO_SQUEEZEBOX: dict[str | MediaType, str] = {
|
||||
"Favorites": "favorites",
|
||||
"Artists": "artists",
|
||||
"Albums": "albums",
|
||||
"Tracks": "titles",
|
||||
"Playlists": "playlists",
|
||||
"Genres": "genres",
|
||||
"New Music": "new music",
|
||||
"Album Artists": "album artists",
|
||||
"favorites": "favorites",
|
||||
"artists": "artists",
|
||||
"albums": "albums",
|
||||
"tracks": "titles",
|
||||
"playlists": "playlists",
|
||||
"genres": "genres",
|
||||
"new music": "new music",
|
||||
"album artists": "album artists",
|
||||
MediaType.ALBUM: "album",
|
||||
MediaType.ARTIST: "artist",
|
||||
MediaType.TRACK: "title",
|
||||
MediaType.PLAYLIST: "playlist",
|
||||
MediaType.GENRE: "genre",
|
||||
"Apps": "apps",
|
||||
"Radios": "radios",
|
||||
MediaType.APPS: "apps",
|
||||
"radios": "radios",
|
||||
}
|
||||
|
||||
SQUEEZEBOX_ID_BY_TYPE: dict[str | MediaType, str] = {
|
||||
@ -58,22 +58,20 @@ SQUEEZEBOX_ID_BY_TYPE: dict[str | MediaType, str] = {
|
||||
MediaType.TRACK: "track_id",
|
||||
MediaType.PLAYLIST: "playlist_id",
|
||||
MediaType.GENRE: "genre_id",
|
||||
"Favorites": "item_id",
|
||||
"favorites": "item_id",
|
||||
MediaType.APPS: "item_id",
|
||||
}
|
||||
|
||||
CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | str]] = {
|
||||
"Favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK},
|
||||
"Apps": {"item": MediaClass.DIRECTORY, "children": MediaClass.APP},
|
||||
"Radios": {"item": MediaClass.DIRECTORY, "children": MediaClass.APP},
|
||||
"App": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK},
|
||||
"Artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST},
|
||||
"Albums": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM},
|
||||
"Tracks": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK},
|
||||
"Playlists": {"item": MediaClass.DIRECTORY, "children": MediaClass.PLAYLIST},
|
||||
"Genres": {"item": MediaClass.DIRECTORY, "children": MediaClass.GENRE},
|
||||
"New Music": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM},
|
||||
"Album Artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST},
|
||||
"favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK},
|
||||
"radios": {"item": MediaClass.DIRECTORY, "children": MediaClass.APP},
|
||||
"artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST},
|
||||
"albums": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM},
|
||||
"tracks": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK},
|
||||
"playlists": {"item": MediaClass.DIRECTORY, "children": MediaClass.PLAYLIST},
|
||||
"genres": {"item": MediaClass.DIRECTORY, "children": MediaClass.GENRE},
|
||||
"new music": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM},
|
||||
"album artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST},
|
||||
MediaType.ALBUM: {"item": MediaClass.ALBUM, "children": MediaClass.TRACK},
|
||||
MediaType.ARTIST: {"item": MediaClass.ARTIST, "children": MediaClass.ALBUM},
|
||||
MediaType.TRACK: {"item": MediaClass.TRACK, "children": ""},
|
||||
@ -91,17 +89,15 @@ CONTENT_TYPE_TO_CHILD_TYPE: dict[
|
||||
MediaType.PLAYLIST: MediaType.PLAYLIST,
|
||||
MediaType.ARTIST: MediaType.ALBUM,
|
||||
MediaType.GENRE: MediaType.ARTIST,
|
||||
"Artists": MediaType.ARTIST,
|
||||
"Albums": MediaType.ALBUM,
|
||||
"Tracks": MediaType.TRACK,
|
||||
"Playlists": MediaType.PLAYLIST,
|
||||
"Genres": MediaType.GENRE,
|
||||
"Favorites": None, # can only be determined after inspecting the item
|
||||
"Apps": MediaClass.APP,
|
||||
"Radios": MediaClass.APP,
|
||||
"App": None, # can only be determined after inspecting the item
|
||||
"New Music": MediaType.ALBUM,
|
||||
"Album Artists": MediaType.ARTIST,
|
||||
"artists": MediaType.ARTIST,
|
||||
"albums": MediaType.ALBUM,
|
||||
"tracks": MediaType.TRACK,
|
||||
"playlists": MediaType.PLAYLIST,
|
||||
"genres": MediaType.GENRE,
|
||||
"favorites": None, # can only be determined after inspecting the item
|
||||
"radios": MediaClass.APP,
|
||||
"new music": MediaType.ALBUM,
|
||||
"album artists": MediaType.ARTIST,
|
||||
MediaType.APPS: MediaType.APP,
|
||||
MediaType.APP: MediaType.TRACK,
|
||||
}
|
||||
@ -173,7 +169,7 @@ def _build_response_known_app(
|
||||
|
||||
|
||||
def _build_response_favorites(item: dict[str, Any]) -> BrowseMedia:
|
||||
"""Build item for Favorites."""
|
||||
"""Build item for favorites."""
|
||||
if "album_id" in item:
|
||||
return BrowseMedia(
|
||||
media_content_id=str(item["album_id"]),
|
||||
@ -183,21 +179,21 @@ def _build_response_favorites(item: dict[str, Any]) -> BrowseMedia:
|
||||
can_expand=True,
|
||||
can_play=True,
|
||||
)
|
||||
if item["hasitems"] and not item["isaudio"]:
|
||||
if item.get("hasitems") and not item.get("isaudio"):
|
||||
return BrowseMedia(
|
||||
media_content_id=item["id"],
|
||||
title=item["title"],
|
||||
media_content_type="Favorites",
|
||||
media_class=CONTENT_TYPE_MEDIA_CLASS["Favorites"]["item"],
|
||||
media_content_type="favorites",
|
||||
media_class=CONTENT_TYPE_MEDIA_CLASS["favorites"]["item"],
|
||||
can_expand=True,
|
||||
can_play=False,
|
||||
)
|
||||
return BrowseMedia(
|
||||
media_content_id=item["id"],
|
||||
title=item["title"],
|
||||
media_content_type="Favorites",
|
||||
media_content_type="favorites",
|
||||
media_class=CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK]["item"],
|
||||
can_expand=item["hasitems"],
|
||||
can_expand=bool(item.get("hasitems")),
|
||||
can_play=bool(item["isaudio"] and item.get("url")),
|
||||
)
|
||||
|
||||
@ -220,7 +216,7 @@ def _get_item_thumbnail(
|
||||
item_type, item["id"], artwork_track_id
|
||||
)
|
||||
|
||||
elif search_type in ["Apps", "Radios"]:
|
||||
elif search_type in ["apps", "radios"]:
|
||||
item_thumbnail = player.generate_image_url(item["icon"])
|
||||
if item_thumbnail is None:
|
||||
item_thumbnail = item.get("image_url") # will not be proxied by HA
|
||||
@ -265,10 +261,10 @@ async def build_item_response(
|
||||
for item in result["items"]:
|
||||
# Force the item id to a string in case it's numeric from some lms
|
||||
item["id"] = str(item.get("id", ""))
|
||||
if search_type == "Favorites":
|
||||
if search_type == "favorites":
|
||||
child_media = _build_response_favorites(item)
|
||||
|
||||
elif search_type in ["Apps", "Radios"]:
|
||||
elif search_type in ["apps", "radios"]:
|
||||
# item["cmd"] contains the name of the command to use with the cli for the app
|
||||
# add the command to the dictionaries
|
||||
if item["title"] == "Search" or item.get("type") in UNPLAYABLE_TYPES:
|
||||
@ -364,11 +360,11 @@ async def library_payload(
|
||||
assert media_class["children"] is not None
|
||||
library_info["children"].append(
|
||||
BrowseMedia(
|
||||
title=item,
|
||||
title=item.title(),
|
||||
media_class=media_class["children"],
|
||||
media_content_id=item,
|
||||
media_content_type=item,
|
||||
can_play=item not in ["Favorites", "Apps", "Radios"],
|
||||
can_play=item not in ["favorites", "apps", "radios"],
|
||||
can_expand=True,
|
||||
)
|
||||
)
|
||||
|
@ -446,6 +446,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
"""Send the play_media command to the media player."""
|
||||
index = None
|
||||
|
||||
if media_type:
|
||||
media_type = media_type.lower()
|
||||
|
||||
enqueue: MediaPlayerEnqueue | None = kwargs.get(ATTR_MEDIA_ENQUEUE)
|
||||
|
||||
if enqueue == MediaPlayerEnqueue.ADD:
|
||||
@ -617,6 +620,9 @@ class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity):
|
||||
media_content_id,
|
||||
)
|
||||
|
||||
if media_content_type:
|
||||
media_content_type = media_content_type.lower()
|
||||
|
||||
if media_content_type in [None, "library"]:
|
||||
return await library_payload(self.hass, self._player, self._browse_data)
|
||||
|
||||
|
@ -61,6 +61,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key="fuel",
|
||||
translation_key="fuel",
|
||||
device_class=SensorDeviceClass.VOLUME,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
|
@ -113,7 +113,9 @@ SENSORS: tuple[StarlinkSensorEntityDescription, ...] = (
|
||||
translation_key="last_boot_time",
|
||||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: now() - timedelta(seconds=data.status["uptime"]),
|
||||
value_fn=lambda data: (
|
||||
now() - timedelta(seconds=data.status["uptime"])
|
||||
).replace(microsecond=0),
|
||||
),
|
||||
StarlinkSensorEntityDescription(
|
||||
key="ping_drop_rate",
|
||||
|
86
homeassistant/components/syncthru/quality_scale.yaml
Normal file
86
homeassistant/components/syncthru/quality_scale.yaml
Normal file
@ -0,0 +1,86 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: todo
|
||||
config-flow: todo
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: todo
|
||||
docs-installation-instructions: todo
|
||||
docs-removal-instructions: todo
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities of this integration do not explicitly subscribe to events.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: No options to configure
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not require authentication.
|
||||
test-coverage: todo
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: todo
|
||||
comment: DHCP or zeroconf is still possible
|
||||
discovery:
|
||||
status: todo
|
||||
comment: DHCP or zeroconf is still possible
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single device.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration has a fixed single device.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
@ -59,7 +59,7 @@
|
||||
"name": "Lamp mode",
|
||||
"state": {
|
||||
"automatic": "Automatic",
|
||||
"manual": "Manual"
|
||||
"manual": "[%key:common::state::manual%]"
|
||||
}
|
||||
},
|
||||
"aroma_therapy_slot": {
|
||||
|
@ -75,7 +75,6 @@ class AirConEntity(WhirlpoolEntity, ClimateEntity):
|
||||
_attr_hvac_modes = SUPPORTED_HVAC_MODES
|
||||
_attr_max_temp = SUPPORTED_MAX_TEMP
|
||||
_attr_min_temp = SUPPORTED_MIN_TEMP
|
||||
_attr_should_poll = False
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.FAN_MODE
|
||||
|
@ -12,6 +12,7 @@ class WhirlpoolEntity(Entity):
|
||||
"""Base class for Whirlpool entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, appliance: Appliance, unique_id_suffix: str = "") -> None:
|
||||
"""Initialize the entity."""
|
||||
|
92
homeassistant/components/whirlpool/quality_scale.yaml
Normal file
92
homeassistant/components/whirlpool/quality_scale.yaml
Normal file
@ -0,0 +1,92 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not provide any additional actions.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup:
|
||||
status: todo
|
||||
comment: |
|
||||
When fetch_appliances fails, ConfigEntryNotReady should be raised.
|
||||
unique-config-entry: done
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: todo
|
||||
comment: |
|
||||
- The calls to the api can be changed to return bool, and services can then raise HomeAssistantError
|
||||
- Current services raise ValueError and should raise ServiceValidationError instead.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: Integration has no configuration parameters
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
- Test helper init_integration() does not set a unique_id
|
||||
- Merge test_setup_http_exception and test_setup_auth_account_locked
|
||||
- The climate platform is at 94%
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration is a cloud service and thus does not support discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration is a cloud service and thus does not support discovery.
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class:
|
||||
status: todo
|
||||
comment: The "unknown" state should not be part of the enum for the dispense level sensor.
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: todo
|
||||
comment: |
|
||||
Time remaining sensor still has hardcoded icon.
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No known use cases for repair issues or flows, yet
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
@ -174,8 +174,6 @@ async def async_setup_entry(
|
||||
class WhirlpoolSensor(WhirlpoolEntity, SensorEntity):
|
||||
"""A class for the Whirlpool sensors."""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(
|
||||
self, appliance: Appliance, description: WhirlpoolSensorEntityDescription
|
||||
) -> None:
|
||||
|
@ -363,11 +363,17 @@ class DriverEvents:
|
||||
self.dev_reg.async_get_device(identifiers={get_device_id(driver, node)})
|
||||
for node in controller.nodes.values()
|
||||
]
|
||||
provisioned_devices = [
|
||||
self.dev_reg.async_get(entry.additional_properties["device_id"])
|
||||
for entry in await controller.async_get_provisioning_entries()
|
||||
if entry.additional_properties
|
||||
and "device_id" in entry.additional_properties
|
||||
]
|
||||
|
||||
# Devices that are in the device registry that are not known by the controller
|
||||
# can be removed
|
||||
for device in stored_devices:
|
||||
if device not in known_devices:
|
||||
if device not in known_devices and device not in provisioned_devices:
|
||||
self.dev_reg.async_remove_device(device.id)
|
||||
|
||||
# run discovery on controller node
|
||||
@ -448,6 +454,8 @@ class ControllerEvents:
|
||||
)
|
||||
)
|
||||
|
||||
await self.async_check_preprovisioned_device(node)
|
||||
|
||||
if node.is_controller_node:
|
||||
# Create a controller status sensor for each device
|
||||
async_dispatcher_send(
|
||||
@ -497,7 +505,7 @@ class ControllerEvents:
|
||||
|
||||
# we do submit the node to device registry so user has
|
||||
# some visual feedback that something is (in the process of) being added
|
||||
self.register_node_in_dev_reg(node)
|
||||
await self.async_register_node_in_dev_reg(node)
|
||||
|
||||
@callback
|
||||
def async_on_node_removed(self, event: dict) -> None:
|
||||
@ -574,18 +582,52 @@ class ControllerEvents:
|
||||
f"{DOMAIN}.identify_controller.{dev_id[1]}",
|
||||
)
|
||||
|
||||
@callback
|
||||
def register_node_in_dev_reg(self, node: ZwaveNode) -> dr.DeviceEntry:
|
||||
async def async_check_preprovisioned_device(self, node: ZwaveNode) -> None:
|
||||
"""Check if the node was preprovisioned and update the device registry."""
|
||||
provisioning_entry = (
|
||||
await self.driver_events.driver.controller.async_get_provisioning_entry(
|
||||
node.node_id
|
||||
)
|
||||
)
|
||||
if (
|
||||
provisioning_entry
|
||||
and provisioning_entry.additional_properties
|
||||
and "device_id" in provisioning_entry.additional_properties
|
||||
):
|
||||
preprovisioned_device = self.dev_reg.async_get(
|
||||
provisioning_entry.additional_properties["device_id"]
|
||||
)
|
||||
|
||||
if preprovisioned_device:
|
||||
dsk = provisioning_entry.dsk
|
||||
dsk_identifier = (DOMAIN, f"provision_{dsk}")
|
||||
|
||||
# If the pre-provisioned device has the DSK identifier, remove it
|
||||
if dsk_identifier in preprovisioned_device.identifiers:
|
||||
driver = self.driver_events.driver
|
||||
device_id = get_device_id(driver, node)
|
||||
device_id_ext = get_device_id_ext(driver, node)
|
||||
new_identifiers = preprovisioned_device.identifiers.copy()
|
||||
new_identifiers.remove(dsk_identifier)
|
||||
new_identifiers.add(device_id)
|
||||
if device_id_ext:
|
||||
new_identifiers.add(device_id_ext)
|
||||
self.dev_reg.async_update_device(
|
||||
preprovisioned_device.id,
|
||||
new_identifiers=new_identifiers,
|
||||
)
|
||||
|
||||
async def async_register_node_in_dev_reg(self, node: ZwaveNode) -> dr.DeviceEntry:
|
||||
"""Register node in dev reg."""
|
||||
driver = self.driver_events.driver
|
||||
device_id = get_device_id(driver, node)
|
||||
device_id_ext = get_device_id_ext(driver, node)
|
||||
node_id_device = self.dev_reg.async_get_device(identifiers={device_id})
|
||||
via_device_id = None
|
||||
via_identifier = None
|
||||
controller = driver.controller
|
||||
# Get the controller node device ID if this node is not the controller
|
||||
if controller.own_node and controller.own_node != node:
|
||||
via_device_id = get_device_id(driver, controller.own_node)
|
||||
via_identifier = get_device_id(driver, controller.own_node)
|
||||
|
||||
if device_id_ext:
|
||||
# If there is a device with this node ID but with a different hardware
|
||||
@ -632,7 +674,7 @@ class ControllerEvents:
|
||||
model=node.device_config.label,
|
||||
manufacturer=node.device_config.manufacturer,
|
||||
suggested_area=node.location if node.location else UNDEFINED,
|
||||
via_device=via_device_id,
|
||||
via_device=via_identifier,
|
||||
)
|
||||
|
||||
async_dispatcher_send(self.hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device)
|
||||
@ -666,7 +708,7 @@ class NodeEvents:
|
||||
"""Handle node ready event."""
|
||||
LOGGER.debug("Processing node %s", node)
|
||||
# register (or update) node in device registry
|
||||
device = self.controller_events.register_node_in_dev_reg(node)
|
||||
device = await self.controller_events.async_register_node_in_dev_reg(node)
|
||||
|
||||
# Remove any old value ids if this is a reinterview.
|
||||
self.controller_events.discovered_value_ids.pop(device.id, None)
|
||||
|
@ -91,6 +91,7 @@ from .const import (
|
||||
from .helpers import (
|
||||
async_enable_statistics,
|
||||
async_get_node_from_device_id,
|
||||
async_get_provisioning_entry_from_device_id,
|
||||
get_device_id,
|
||||
)
|
||||
|
||||
@ -171,6 +172,10 @@ ADDITIONAL_PROPERTIES = "additional_properties"
|
||||
STATUS = "status"
|
||||
REQUESTED_SECURITY_CLASSES = "requestedSecurityClasses"
|
||||
|
||||
PROTOCOL = "protocol"
|
||||
DEVICE_NAME = "device_name"
|
||||
AREA_ID = "area_id"
|
||||
|
||||
FEATURE = "feature"
|
||||
STRATEGY = "strategy"
|
||||
|
||||
@ -398,6 +403,7 @@ def async_register_api(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_s2_inclusion)
|
||||
websocket_api.async_register_command(hass, websocket_grant_security_classes)
|
||||
websocket_api.async_register_command(hass, websocket_validate_dsk_and_enter_pin)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_new_devices)
|
||||
websocket_api.async_register_command(hass, websocket_provision_smart_start_node)
|
||||
websocket_api.async_register_command(hass, websocket_unprovision_smart_start_node)
|
||||
websocket_api.async_register_command(hass, websocket_get_provisioning_entries)
|
||||
@ -631,14 +637,38 @@ async def websocket_node_metadata(
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@async_get_node
|
||||
async def websocket_node_alerts(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
node: Node,
|
||||
) -> None:
|
||||
"""Get the alerts for a Z-Wave JS node."""
|
||||
try:
|
||||
node = async_get_node_from_device_id(hass, msg[DEVICE_ID])
|
||||
except ValueError as err:
|
||||
if "can't be found" in err.args[0]:
|
||||
provisioning_entry = await async_get_provisioning_entry_from_device_id(
|
||||
hass, msg[DEVICE_ID]
|
||||
)
|
||||
if provisioning_entry:
|
||||
connection.send_result(
|
||||
msg[ID],
|
||||
{
|
||||
"comments": [
|
||||
{
|
||||
"level": "info",
|
||||
"text": "This device has been provisioned but is not yet included in the "
|
||||
"network.",
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
else:
|
||||
connection.send_error(msg[ID], ERR_NOT_FOUND, str(err))
|
||||
else:
|
||||
connection.send_error(msg[ID], ERR_NOT_LOADED, str(err))
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg[ID],
|
||||
{
|
||||
@ -971,12 +1001,58 @@ async def websocket_validate_dsk_and_enter_pin(
|
||||
connection.send_result(msg[ID])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required(TYPE): "zwave_js/subscribe_new_devices",
|
||||
vol.Required(ENTRY_ID): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def websocket_subscribe_new_devices(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to new devices."""
|
||||
|
||||
@callback
|
||||
def async_cleanup() -> None:
|
||||
for unsub in unsubs:
|
||||
unsub()
|
||||
|
||||
@callback
|
||||
def device_registered(device: dr.DeviceEntry) -> None:
|
||||
device_details = {
|
||||
"name": device.name,
|
||||
"id": device.id,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model": device.model,
|
||||
}
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg[ID], {"event": "device registered", "device": device_details}
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_cleanup
|
||||
msg[DATA_UNSUBSCRIBE] = unsubs = [
|
||||
async_dispatcher_connect(
|
||||
hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device_registered
|
||||
),
|
||||
]
|
||||
connection.send_result(msg[ID])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required(TYPE): "zwave_js/provision_smart_start_node",
|
||||
vol.Required(ENTRY_ID): str,
|
||||
vol.Required(QR_PROVISIONING_INFORMATION): QR_PROVISIONING_INFORMATION_SCHEMA,
|
||||
vol.Optional(PROTOCOL): vol.Coerce(Protocols),
|
||||
vol.Optional(DEVICE_NAME): str,
|
||||
vol.Optional(AREA_ID): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@ -991,18 +1067,68 @@ async def websocket_provision_smart_start_node(
|
||||
driver: Driver,
|
||||
) -> None:
|
||||
"""Pre-provision a smart start node."""
|
||||
qr_info = msg[QR_PROVISIONING_INFORMATION]
|
||||
|
||||
provisioning_info = msg[QR_PROVISIONING_INFORMATION]
|
||||
|
||||
if provisioning_info.version == QRCodeVersion.S2:
|
||||
if qr_info.version == QRCodeVersion.S2:
|
||||
connection.send_error(
|
||||
msg[ID],
|
||||
ERR_INVALID_FORMAT,
|
||||
"QR code version S2 is not supported for this command",
|
||||
)
|
||||
return
|
||||
|
||||
provisioning_info = ProvisioningEntry(
|
||||
dsk=qr_info.dsk,
|
||||
security_classes=qr_info.security_classes,
|
||||
requested_security_classes=qr_info.requested_security_classes,
|
||||
protocol=msg.get(PROTOCOL),
|
||||
additional_properties=qr_info.additional_properties,
|
||||
)
|
||||
|
||||
device = None
|
||||
# Create an empty device if device_name is provided
|
||||
if device_name := msg.get(DEVICE_NAME):
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
# Create a unique device identifier using the DSK
|
||||
device_identifier = (DOMAIN, f"provision_{qr_info.dsk}")
|
||||
|
||||
manufacturer = None
|
||||
model = None
|
||||
|
||||
device_info = await driver.config_manager.lookup_device(
|
||||
qr_info.manufacturer_id,
|
||||
qr_info.product_type,
|
||||
qr_info.product_id,
|
||||
)
|
||||
if device_info:
|
||||
manufacturer = device_info.manufacturer
|
||||
model = device_info.label
|
||||
|
||||
# Create an empty device
|
||||
device = dev_reg.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
identifiers={device_identifier},
|
||||
name=device_name,
|
||||
manufacturer=manufacturer,
|
||||
model=model,
|
||||
via_device=get_device_id(driver, driver.controller.own_node)
|
||||
if driver.controller.own_node
|
||||
else None,
|
||||
)
|
||||
dev_reg.async_update_device(
|
||||
device.id, area_id=msg.get(AREA_ID), name_by_user=device_name
|
||||
)
|
||||
|
||||
if provisioning_info.additional_properties is None:
|
||||
provisioning_info.additional_properties = {}
|
||||
provisioning_info.additional_properties["device_id"] = device.id
|
||||
|
||||
await driver.controller.async_provision_smart_start_node(provisioning_info)
|
||||
connection.send_result(msg[ID])
|
||||
if device:
|
||||
connection.send_result(msg[ID], device.id)
|
||||
else:
|
||||
connection.send_result(msg[ID])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@ -1036,7 +1162,24 @@ async def websocket_unprovision_smart_start_node(
|
||||
)
|
||||
return
|
||||
dsk_or_node_id = msg.get(DSK) or msg[NODE_ID]
|
||||
provisioning_entry = await driver.controller.async_get_provisioning_entry(
|
||||
dsk_or_node_id
|
||||
)
|
||||
if (
|
||||
provisioning_entry
|
||||
and provisioning_entry.additional_properties
|
||||
and "device_id" in provisioning_entry.additional_properties
|
||||
):
|
||||
device_identifier = (DOMAIN, f"provision_{provisioning_entry.dsk}")
|
||||
device_id = provisioning_entry.additional_properties["device_id"]
|
||||
dev_reg = dr.async_get(hass)
|
||||
device = dev_reg.async_get(device_id)
|
||||
if device and device.identifiers == {device_identifier}:
|
||||
# Only remove the device if nothing else has claimed it
|
||||
dev_reg.async_remove_device(device_id)
|
||||
|
||||
await driver.controller.async_unprovision_smart_start_node(dsk_or_node_id)
|
||||
|
||||
connection.send_result(msg[ID])
|
||||
|
||||
|
||||
|
@ -4,12 +4,17 @@ from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import aiohttp
|
||||
from serial.tools import list_ports
|
||||
import voluptuous as vol
|
||||
from zwave_js_server.client import Client
|
||||
from zwave_js_server.exceptions import FailedCommand
|
||||
from zwave_js_server.model.driver import Driver
|
||||
from zwave_js_server.version import VersionInfo, get_server_version
|
||||
|
||||
from homeassistant.components import usb
|
||||
@ -23,6 +28,7 @@ from homeassistant.config_entries import (
|
||||
SOURCE_USB,
|
||||
ConfigEntry,
|
||||
ConfigEntryBaseFlow,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
@ -60,6 +66,7 @@ from .const import (
|
||||
CONF_S2_UNAUTHENTICATED_KEY,
|
||||
CONF_USB_PATH,
|
||||
CONF_USE_ADDON,
|
||||
DATA_CLIENT,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
@ -74,6 +81,9 @@ CONF_EMULATE_HARDWARE = "emulate_hardware"
|
||||
CONF_LOG_LEVEL = "log_level"
|
||||
SERVER_VERSION_TIMEOUT = 10
|
||||
|
||||
OPTIONS_INTENT_MIGRATE = "intent_migrate"
|
||||
OPTIONS_INTENT_RECONFIGURE = "intent_reconfigure"
|
||||
|
||||
ADDON_LOG_LEVELS = {
|
||||
"error": "Error",
|
||||
"warn": "Warn",
|
||||
@ -636,7 +646,12 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN):
|
||||
}
|
||||
|
||||
if not self._usb_discovery:
|
||||
ports = await async_get_usb_ports(self.hass)
|
||||
try:
|
||||
ports = await async_get_usb_ports(self.hass)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Failed to get USB ports: %s", err)
|
||||
return self.async_abort(reason="usb_ports_failed")
|
||||
|
||||
schema = {
|
||||
vol.Required(CONF_USB_PATH, default=usb_path): vol.In(ports),
|
||||
**schema,
|
||||
@ -717,6 +732,10 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
super().__init__()
|
||||
self.original_addon_config: dict[str, Any] | None = None
|
||||
self.revert_reason: str | None = None
|
||||
self.backup_task: asyncio.Task | None = None
|
||||
self.restore_backup_task: asyncio.Task | None = None
|
||||
self.backup_data: bytes | None = None
|
||||
self.backup_filepath: str | None = None
|
||||
|
||||
@callback
|
||||
def _async_update_entry(self, data: dict[str, Any]) -> None:
|
||||
@ -725,6 +744,18 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm if we are migrating adapters or just re-configuring."""
|
||||
return self.async_show_menu(
|
||||
step_id="init",
|
||||
menu_options=[
|
||||
OPTIONS_INTENT_RECONFIGURE,
|
||||
OPTIONS_INTENT_MIGRATE,
|
||||
],
|
||||
)
|
||||
|
||||
async def async_step_intent_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
if is_hassio(self.hass):
|
||||
@ -732,6 +763,91 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
|
||||
return await self.async_step_manual()
|
||||
|
||||
async def async_step_intent_migrate(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm the user wants to reset their current controller."""
|
||||
if not self.config_entry.data.get(CONF_USE_ADDON):
|
||||
return self.async_abort(reason="addon_required")
|
||||
|
||||
if user_input is not None:
|
||||
return await self.async_step_backup_nvm()
|
||||
|
||||
return self.async_show_form(step_id="intent_migrate")
|
||||
|
||||
async def async_step_backup_nvm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Backup the current network."""
|
||||
if self.backup_task is None:
|
||||
self.backup_task = self.hass.async_create_task(self._async_backup_network())
|
||||
|
||||
if not self.backup_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="backup_nvm",
|
||||
progress_action="backup_nvm",
|
||||
progress_task=self.backup_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self.backup_task
|
||||
except AbortFlow as err:
|
||||
_LOGGER.error(err)
|
||||
return self.async_show_progress_done(next_step_id="backup_failed")
|
||||
finally:
|
||||
self.backup_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="instruct_unplug")
|
||||
|
||||
async def async_step_restore_nvm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Restore the backup."""
|
||||
if self.restore_backup_task is None:
|
||||
self.restore_backup_task = self.hass.async_create_task(
|
||||
self._async_restore_network_backup()
|
||||
)
|
||||
|
||||
if not self.restore_backup_task.done():
|
||||
return self.async_show_progress(
|
||||
step_id="restore_nvm",
|
||||
progress_action="restore_nvm",
|
||||
progress_task=self.restore_backup_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self.restore_backup_task
|
||||
except AbortFlow as err:
|
||||
_LOGGER.error(err)
|
||||
return self.async_show_progress_done(next_step_id="restore_failed")
|
||||
finally:
|
||||
self.restore_backup_task = None
|
||||
|
||||
return self.async_show_progress_done(next_step_id="migration_done")
|
||||
|
||||
async def async_step_instruct_unplug(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Reset the current controller, and instruct the user to unplug it."""
|
||||
|
||||
if user_input is not None:
|
||||
# Now that the old controller is gone, we can scan for serial ports again
|
||||
return await self.async_step_choose_serial_port()
|
||||
|
||||
# reset the old controller
|
||||
try:
|
||||
await self._get_driver().async_hard_reset()
|
||||
except FailedCommand as err:
|
||||
_LOGGER.error("Failed to reset controller: %s", err)
|
||||
return self.async_abort(reason="reset_failed")
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="instruct_unplug",
|
||||
description_placeholders={
|
||||
"file_path": str(self.backup_filepath),
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_manual(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@ -881,7 +997,11 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
log_level = addon_config.get(CONF_ADDON_LOG_LEVEL, "info")
|
||||
emulate_hardware = addon_config.get(CONF_ADDON_EMULATE_HARDWARE, False)
|
||||
|
||||
ports = await async_get_usb_ports(self.hass)
|
||||
try:
|
||||
ports = await async_get_usb_ports(self.hass)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Failed to get USB ports: %s", err)
|
||||
return self.async_abort(reason="usb_ports_failed")
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
@ -911,12 +1031,64 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
|
||||
return self.async_show_form(step_id="configure_addon", data_schema=data_schema)
|
||||
|
||||
async def async_step_choose_serial_port(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Choose a serial port."""
|
||||
if user_input is not None:
|
||||
addon_info = await self._async_get_addon_info()
|
||||
addon_config = addon_info.options
|
||||
self.usb_path = user_input[CONF_USB_PATH]
|
||||
new_addon_config = {
|
||||
**addon_config,
|
||||
CONF_ADDON_DEVICE: self.usb_path,
|
||||
}
|
||||
if addon_info.state == AddonState.RUNNING:
|
||||
self.restart_addon = True
|
||||
# Copy the add-on config to keep the objects separate.
|
||||
self.original_addon_config = dict(addon_config)
|
||||
await self._async_set_addon_config(new_addon_config)
|
||||
return await self.async_step_start_addon()
|
||||
|
||||
try:
|
||||
ports = await async_get_usb_ports(self.hass)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Failed to get USB ports: %s", err)
|
||||
return self.async_abort(reason="usb_ports_failed")
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USB_PATH): vol.In(ports),
|
||||
}
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="choose_serial_port", data_schema=data_schema
|
||||
)
|
||||
|
||||
async def async_step_start_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Add-on start failed."""
|
||||
return await self.async_revert_addon_config(reason="addon_start_failed")
|
||||
|
||||
async def async_step_backup_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Backup failed."""
|
||||
return self.async_abort(reason="backup_failed")
|
||||
|
||||
async def async_step_restore_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Restore failed."""
|
||||
return self.async_abort(reason="restore_failed")
|
||||
|
||||
async def async_step_migration_done(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Migration done."""
|
||||
return self.async_create_entry(title=TITLE, data={})
|
||||
|
||||
async def async_step_finish_addon_setup(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@ -943,12 +1115,16 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
except CannotConnect:
|
||||
return await self.async_revert_addon_config(reason="cannot_connect")
|
||||
|
||||
if self.config_entry.unique_id != str(self.version_info.home_id):
|
||||
if self.backup_data is None and self.config_entry.unique_id != str(
|
||||
self.version_info.home_id
|
||||
):
|
||||
return await self.async_revert_addon_config(reason="different_device")
|
||||
|
||||
self._async_update_entry(
|
||||
{
|
||||
**self.config_entry.data,
|
||||
# this will only be different in a migration flow
|
||||
"unique_id": str(self.version_info.home_id),
|
||||
CONF_URL: self.ws_address,
|
||||
CONF_USB_PATH: self.usb_path,
|
||||
CONF_S0_LEGACY_KEY: self.s0_legacy_key,
|
||||
@ -961,6 +1137,9 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
|
||||
}
|
||||
)
|
||||
if self.backup_data:
|
||||
return await self.async_step_restore_nvm()
|
||||
|
||||
# Always reload entry since we may have disconnected the client.
|
||||
self.hass.config_entries.async_schedule_reload(self.config_entry.entry_id)
|
||||
return self.async_create_entry(title=TITLE, data={})
|
||||
@ -990,6 +1169,74 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow):
|
||||
_LOGGER.debug("Reverting add-on options, reason: %s", reason)
|
||||
return await self.async_step_configure_addon(addon_config_input)
|
||||
|
||||
async def _async_backup_network(self) -> None:
|
||||
"""Backup the current network."""
|
||||
|
||||
@callback
|
||||
def forward_progress(event: dict) -> None:
|
||||
"""Forward progress events to frontend."""
|
||||
self.async_update_progress(event["bytesRead"] / event["total"])
|
||||
|
||||
controller = self._get_driver().controller
|
||||
unsub = controller.on("nvm backup progress", forward_progress)
|
||||
try:
|
||||
self.backup_data = await controller.async_backup_nvm_raw()
|
||||
except FailedCommand as err:
|
||||
raise AbortFlow(f"Failed to backup network: {err}") from err
|
||||
finally:
|
||||
unsub()
|
||||
|
||||
# save the backup to a file just in case
|
||||
self.backup_filepath = self.hass.config.path(
|
||||
f"zwavejs_nvm_backup_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.bin"
|
||||
)
|
||||
try:
|
||||
await self.hass.async_add_executor_job(
|
||||
Path(self.backup_filepath).write_bytes,
|
||||
self.backup_data,
|
||||
)
|
||||
except OSError as err:
|
||||
raise AbortFlow(f"Failed to save backup file: {err}") from err
|
||||
|
||||
async def _async_restore_network_backup(self) -> None:
|
||||
"""Restore the backup."""
|
||||
assert self.backup_data is not None
|
||||
|
||||
# Reload the config entry to reconnect the client after the addon restart
|
||||
await self.hass.config_entries.async_reload(self.config_entry.entry_id)
|
||||
|
||||
@callback
|
||||
def forward_progress(event: dict) -> None:
|
||||
"""Forward progress events to frontend."""
|
||||
if event["event"] == "nvm convert progress":
|
||||
# assume convert is 50% of the total progress
|
||||
self.async_update_progress(event["bytesRead"] / event["total"] * 0.5)
|
||||
elif event["event"] == "nvm restore progress":
|
||||
# assume restore is the rest of the progress
|
||||
self.async_update_progress(
|
||||
event["bytesWritten"] / event["total"] * 0.5 + 0.5
|
||||
)
|
||||
|
||||
controller = self._get_driver().controller
|
||||
unsubs = [
|
||||
controller.on("nvm convert progress", forward_progress),
|
||||
controller.on("nvm restore progress", forward_progress),
|
||||
]
|
||||
try:
|
||||
await controller.async_restore_nvm(self.backup_data)
|
||||
except FailedCommand as err:
|
||||
raise AbortFlow(f"Failed to restore network: {err}") from err
|
||||
finally:
|
||||
for unsub in unsubs:
|
||||
unsub()
|
||||
|
||||
def _get_driver(self) -> Driver:
|
||||
if self.config_entry.state != ConfigEntryState.LOADED:
|
||||
raise AbortFlow("Configuration entry is not loaded")
|
||||
client: Client = self.config_entry.runtime_data[DATA_CLIENT]
|
||||
assert client.driver is not None
|
||||
return client.driver
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Indicate connection error."""
|
||||
|
@ -15,7 +15,7 @@ from zwave_js_server.const import (
|
||||
ConfigurationValueType,
|
||||
LogLevel,
|
||||
)
|
||||
from zwave_js_server.model.controller import Controller
|
||||
from zwave_js_server.model.controller import Controller, ProvisioningEntry
|
||||
from zwave_js_server.model.driver import Driver
|
||||
from zwave_js_server.model.log_config import LogConfig
|
||||
from zwave_js_server.model.node import Node as ZwaveNode
|
||||
@ -233,7 +233,7 @@ def get_home_and_node_id_from_device_entry(
|
||||
),
|
||||
None,
|
||||
)
|
||||
if device_id is None:
|
||||
if device_id is None or device_id.startswith("provision_"):
|
||||
return None
|
||||
id_ = device_id.split("-")
|
||||
return (id_[0], int(id_[1]))
|
||||
@ -264,12 +264,12 @@ def async_get_node_from_device_id(
|
||||
),
|
||||
None,
|
||||
)
|
||||
if entry and entry.state != ConfigEntryState.LOADED:
|
||||
raise ValueError(f"Device {device_id} config entry is not loaded")
|
||||
if entry is None:
|
||||
raise ValueError(
|
||||
f"Device {device_id} is not from an existing zwave_js config entry"
|
||||
)
|
||||
if entry.state != ConfigEntryState.LOADED:
|
||||
raise ValueError(f"Device {device_id} config entry is not loaded")
|
||||
|
||||
client: ZwaveClient = entry.runtime_data[DATA_CLIENT]
|
||||
driver = client.driver
|
||||
@ -289,6 +289,53 @@ def async_get_node_from_device_id(
|
||||
return driver.controller.nodes[node_id]
|
||||
|
||||
|
||||
async def async_get_provisioning_entry_from_device_id(
|
||||
hass: HomeAssistant, device_id: str
|
||||
) -> ProvisioningEntry | None:
|
||||
"""Get provisioning entry from a device ID.
|
||||
|
||||
Raises ValueError if device is invalid
|
||||
"""
|
||||
dev_reg = dr.async_get(hass)
|
||||
|
||||
if not (device_entry := dev_reg.async_get(device_id)):
|
||||
raise ValueError(f"Device ID {device_id} is not valid")
|
||||
|
||||
# Use device config entry ID's to validate that this is a valid zwave_js device
|
||||
# and to get the client
|
||||
config_entry_ids = device_entry.config_entries
|
||||
entry = next(
|
||||
(
|
||||
entry
|
||||
for entry in hass.config_entries.async_entries(DOMAIN)
|
||||
if entry.entry_id in config_entry_ids
|
||||
),
|
||||
None,
|
||||
)
|
||||
if entry is None:
|
||||
raise ValueError(
|
||||
f"Device {device_id} is not from an existing zwave_js config entry"
|
||||
)
|
||||
if entry.state != ConfigEntryState.LOADED:
|
||||
raise ValueError(f"Device {device_id} config entry is not loaded")
|
||||
|
||||
client: ZwaveClient = entry.runtime_data[DATA_CLIENT]
|
||||
driver = client.driver
|
||||
|
||||
if driver is None:
|
||||
raise ValueError("Driver is not ready.")
|
||||
|
||||
provisioning_entries = await driver.controller.async_get_provisioning_entries()
|
||||
for provisioning_entry in provisioning_entries:
|
||||
if (
|
||||
provisioning_entry.additional_properties
|
||||
and provisioning_entry.additional_properties.get("device_id") == device_id
|
||||
):
|
||||
return provisioning_entry
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_node_from_entity_id(
|
||||
hass: HomeAssistant,
|
||||
|
@ -11,7 +11,11 @@
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"discovery_requires_supervisor": "Discovery requires the supervisor.",
|
||||
"not_zwave_device": "Discovered device is not a Z-Wave device.",
|
||||
"not_zwave_js_addon": "Discovered add-on is not the official Z-Wave add-on."
|
||||
"not_zwave_js_addon": "Discovered add-on is not the official Z-Wave add-on.",
|
||||
"backup_failed": "Failed to backup network.",
|
||||
"restore_failed": "Failed to restore network.",
|
||||
"reset_failed": "Failed to reset controller.",
|
||||
"usb_ports_failed": "Failed to get USB devices."
|
||||
},
|
||||
"error": {
|
||||
"addon_start_failed": "Failed to start the Z-Wave add-on. Check the configuration.",
|
||||
@ -22,7 +26,9 @@
|
||||
"flow_title": "{name}",
|
||||
"progress": {
|
||||
"install_addon": "Please wait while the Z-Wave add-on installation finishes. This can take several minutes.",
|
||||
"start_addon": "Please wait while the Z-Wave add-on start completes. This may take some seconds."
|
||||
"start_addon": "Please wait while the Z-Wave add-on start completes. This may take some seconds.",
|
||||
"backup_nvm": "Please wait while the network backup completes.",
|
||||
"restore_nvm": "Please wait while the network restore completes."
|
||||
},
|
||||
"step": {
|
||||
"configure_addon": {
|
||||
@ -217,7 +223,12 @@
|
||||
"addon_stop_failed": "Failed to stop the Z-Wave add-on.",
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"different_device": "The connected USB device is not the same as previously configured for this config entry. Please instead create a new config entry for the new device."
|
||||
"different_device": "The connected USB device is not the same as previously configured for this config entry. Please instead create a new config entry for the new device.",
|
||||
"addon_required": "The Z-Wave migration flow requires the integration to be configured using the Z-Wave Supervisor add-on. You can still use the Backup and Restore buttons to migrate your network manually.",
|
||||
"backup_failed": "[%key:component::zwave_js::config::abort::backup_failed%]",
|
||||
"restore_failed": "[%key:component::zwave_js::config::abort::restore_failed%]",
|
||||
"reset_failed": "[%key:component::zwave_js::config::abort::reset_failed%]",
|
||||
"usb_ports_failed": "[%key:component::zwave_js::config::abort::usb_ports_failed%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@ -226,9 +237,27 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::zwave_js::config::progress::install_addon%]",
|
||||
"start_addon": "[%key:component::zwave_js::config::progress::start_addon%]"
|
||||
"start_addon": "[%key:component::zwave_js::config::progress::start_addon%]",
|
||||
"backup_nvm": "[%key:component::zwave_js::config::progress::backup_nvm%]",
|
||||
"restore_nvm": "[%key:component::zwave_js::config::progress::restore_nvm%]"
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"title": "Migrate or re-configure",
|
||||
"description": "Are you migrating to a new controller or re-configuring the current controller?",
|
||||
"menu_options": {
|
||||
"intent_migrate": "Migrate to a new controller",
|
||||
"intent_reconfigure": "Re-configure the current controller"
|
||||
}
|
||||
},
|
||||
"intent_migrate": {
|
||||
"title": "[%key:component::zwave_js::options::step::init::menu_options::intent_migrate%]",
|
||||
"description": "Before setting up your new controller, your old controller needs to be reset. A backup will be performed first.\n\nDo you wish to continue?"
|
||||
},
|
||||
"instruct_unplug": {
|
||||
"title": "Unplug your old controller",
|
||||
"description": "Backup saved to \"{file_path}\"\n\nYour old controller has been reset. If the hardware is no longer needed, you can now unplug it.\n\nPlease make sure your new controller is plugged in before continuing."
|
||||
},
|
||||
"configure_addon": {
|
||||
"data": {
|
||||
"emulate_hardware": "Emulate Hardware",
|
||||
@ -242,6 +271,12 @@
|
||||
"description": "[%key:component::zwave_js::config::step::configure_addon::description%]",
|
||||
"title": "[%key:component::zwave_js::config::step::configure_addon::title%]"
|
||||
},
|
||||
"choose_serial_port": {
|
||||
"data": {
|
||||
"usb_path": "[%key:common::config_flow::data::usb_path%]"
|
||||
},
|
||||
"title": "Select your Z-Wave device"
|
||||
},
|
||||
"install_addon": {
|
||||
"title": "[%key:component::zwave_js::config::step::install_addon::title%]"
|
||||
},
|
||||
|
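The new abort and progress keys above are rendered when a config or options flow aborts or reports progress with the matching reason or action. A generic, hypothetical flow sketch (not the actual zwave_js flow) showing how keys like these are typically raised:

# Generic sketch only; the flow class, domain, and backup helper are hypothetical.
from homeassistant import config_entries


class ExampleMigrationFlow(config_entries.ConfigFlow, domain="example_domain"):
    """Hypothetical flow that backs up a network before migrating."""

    async def async_step_intent_migrate(self, user_input=None):
        try:
            await self._async_backup_network()  # hypothetical helper
        except RuntimeError:
            # Rendered with the "backup_failed" abort string defined above.
            return self.async_abort(reason="backup_failed")
        return await self.async_step_instruct_unplug()

    async def async_step_instruct_unplug(self, user_input=None):
        """Show the step whose title and description are defined above."""
        return self.async_show_form(step_id="instruct_unplug")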
@ -3329,7 +3329,7 @@
|
||||
"name": "La Marzocco",
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
"iot_class": "cloud_push"
|
||||
},
|
||||
"lametric": {
|
||||
"name": "LaMetric",
|
||||
|
@ -1072,7 +1072,7 @@ class TemplateStateBase(State):
|
||||
raise KeyError
|
||||
|
||||
@under_cached_property
|
||||
def entity_id(self) -> str: # type: ignore[override]
|
||||
def entity_id(self) -> str:
|
||||
"""Wrap State.entity_id.
|
||||
|
||||
Intentionally does not collect state
|
||||
@ -1128,7 +1128,7 @@ class TemplateStateBase(State):
|
||||
return self._state.object_id
|
||||
|
||||
@property
|
||||
def name(self) -> str: # type: ignore[override]
|
||||
def name(self) -> str:
|
||||
"""Wrap State.name."""
|
||||
self._collect_state()
|
||||
return self._state.name
|
||||
|
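The two hunks above drop "# type: ignore[override]" from properties that shadow plain attributes of the wrapped State object. A dependency-free sketch of that pattern, with functools.cached_property standing in for the under_cached_property decorator used above and purely illustrative names:

# Minimal sketch: a subclass replaces a plain base-class attribute with a
# cached property. Illustrative only; not Home Assistant code.
from functools import cached_property  # stand-in for under_cached_property


class PlainState:
    def __init__(self, entity_id: str) -> None:
        self.entity_id = entity_id


class StateWrapper(PlainState):
    def __init__(self, wrapped: PlainState) -> None:
        # Deliberately does not call super().__init__(), so the property below
        # is the only "entity_id" on instances of this class.
        self._wrapped = wrapped

    @cached_property
    def entity_id(self) -> str:
        return self._wrapped.entity_id


print(StateWrapper(PlainState("light.example")).entity_id)  # light.example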
@ -34,7 +34,7 @@ dbus-fast==2.43.0
|
||||
fnv-hash-fast==1.4.0
|
||||
go2rtc-client==0.1.2
|
||||
ha-ffmpeg==3.2.2
|
||||
habluetooth==3.38.1
|
||||
habluetooth==3.39.0
|
||||
hass-nabucasa==0.94.0
|
||||
hassil==2.2.3
|
||||
home-assistant-bluetooth==1.13.1
|
||||
@ -74,7 +74,7 @@ voluptuous-openapi==0.0.6
|
||||
voluptuous-serialize==2.6.0
|
||||
voluptuous==0.15.2
|
||||
webrtc-models==0.3.0
|
||||
yarl==1.19.0
|
||||
yarl==1.20.0
|
||||
zeroconf==0.146.5
|
||||
|
||||
# Constrain pycryptodome to avoid vulnerability
|
||||
|
@ -121,7 +121,7 @@ dependencies = [
|
||||
"voluptuous==0.15.2",
|
||||
"voluptuous-serialize==2.6.0",
|
||||
"voluptuous-openapi==0.0.6",
|
||||
"yarl==1.19.0",
|
||||
"yarl==1.20.0",
|
||||
"webrtc-models==0.3.0",
|
||||
"zeroconf==0.146.5",
|
||||
]
|
||||
|
2
requirements.txt
generated
@ -58,6 +58,6 @@ uv==0.6.10
|
||||
voluptuous==0.15.2
|
||||
voluptuous-serialize==2.6.0
|
||||
voluptuous-openapi==0.0.6
|
||||
yarl==1.19.0
|
||||
yarl==1.20.0
|
||||
webrtc-models==0.3.0
|
||||
zeroconf==0.146.5
|
||||
|
10
requirements_all.txt
generated
@ -829,7 +829,7 @@ ebusdpy==0.0.17
|
||||
ecoaliface==0.4.0
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.0.6
|
||||
eheimdigital==1.1.0
|
||||
|
||||
# homeassistant.components.electric_kiwi
|
||||
electrickiwi-api==0.9.14
|
||||
@ -889,7 +889,7 @@ epson-projector==0.5.1
|
||||
eq3btsmart==1.4.1
|
||||
|
||||
# homeassistant.components.esphome
|
||||
esphome-dashboard-api==1.2.3
|
||||
esphome-dashboard-api==1.3.0
|
||||
|
||||
# homeassistant.components.netgear_lte
|
||||
eternalegypt==0.0.16
|
||||
@ -1114,7 +1114,7 @@ ha-silabs-firmware-client==0.2.0
|
||||
habiticalib==0.3.7
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==3.38.1
|
||||
habluetooth==3.39.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.94.0
|
||||
@ -1957,7 +1957,7 @@ pyenphase==1.25.5
|
||||
pyenvisalink==4.7
|
||||
|
||||
# homeassistant.components.ephember
|
||||
pyephember==0.3.1
|
||||
pyephember2==0.4.12
|
||||
|
||||
# homeassistant.components.everlights
|
||||
pyeverlights==0.1.0
|
||||
@ -2089,7 +2089,7 @@ pykwb==0.0.8
|
||||
pylacrosse==0.4
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==1.4.9
|
||||
pylamarzocco==2.0.0b1
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
|
@ -10,9 +10,10 @@
|
||||
astroid==3.3.9
|
||||
coverage==7.6.12
|
||||
freezegun==1.5.1
|
||||
go2rtc-client==0.1.2
|
||||
license-expression==30.4.1
|
||||
mock-open==1.4.0
|
||||
mypy-dev==1.16.0a7
|
||||
mypy-dev==1.16.0a8
|
||||
pre-commit==4.0.0
|
||||
pydantic==2.11.3
|
||||
pylint==3.3.6
|
||||
|
8
requirements_test_all.txt
generated
@ -708,7 +708,7 @@ eagle100==0.1.1
|
||||
easyenergy==2.1.2
|
||||
|
||||
# homeassistant.components.eheimdigital
|
||||
eheimdigital==1.0.6
|
||||
eheimdigital==1.1.0
|
||||
|
||||
# homeassistant.components.electric_kiwi
|
||||
electrickiwi-api==0.9.14
|
||||
@ -759,7 +759,7 @@ epson-projector==0.5.1
|
||||
eq3btsmart==1.4.1
|
||||
|
||||
# homeassistant.components.esphome
|
||||
esphome-dashboard-api==1.2.3
|
||||
esphome-dashboard-api==1.3.0
|
||||
|
||||
# homeassistant.components.netgear_lte
|
||||
eternalegypt==0.0.16
|
||||
@ -956,7 +956,7 @@ ha-silabs-firmware-client==0.2.0
|
||||
habiticalib==0.3.7
|
||||
|
||||
# homeassistant.components.bluetooth
|
||||
habluetooth==3.38.1
|
||||
habluetooth==3.39.0
|
||||
|
||||
# homeassistant.components.cloud
|
||||
hass-nabucasa==0.94.0
|
||||
@ -1704,7 +1704,7 @@ pykrakenapi==0.1.8
|
||||
pykulersky==0.5.8
|
||||
|
||||
# homeassistant.components.lamarzocco
|
||||
pylamarzocco==1.4.9
|
||||
pylamarzocco==2.0.0b1
|
||||
|
||||
# homeassistant.components.lastfm
|
||||
pylast==5.1.0
|
||||
|
@ -969,7 +969,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
|
||||
"switcher_kis",
|
||||
"switchmate",
|
||||
"syncthing",
|
||||
"syncthru",
|
||||
"synology_chat",
|
||||
"synology_dsm",
|
||||
"synology_srm",
|
||||
@ -1100,7 +1099,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
|
||||
"weatherkit",
|
||||
"webmin",
|
||||
"wemo",
|
||||
"whirlpool",
|
||||
"whois",
|
||||
"wiffi",
|
||||
"wilight",
|
||||
|
@ -355,6 +355,7 @@ async def test_browse_media(
|
||||
"children_media_class": "app",
|
||||
"can_play": False,
|
||||
"can_expand": True,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"not_shown": 0,
|
||||
"children": [
|
||||
@ -366,6 +367,7 @@ async def test_browse_media(
|
||||
"children_media_class": None,
|
||||
"can_play": False,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": "https://www.youtube.com/icon.png",
|
||||
},
|
||||
{
|
||||
@ -376,6 +378,7 @@ async def test_browse_media(
|
||||
"children_media_class": None,
|
||||
"can_play": False,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": "",
|
||||
},
|
||||
],
|
||||
|
@ -1323,6 +1323,7 @@ async def test_async_play_media_url_m3u(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
},
|
||||
@ -1337,6 +1338,7 @@ async def test_async_play_media_url_m3u(
|
||||
"media_content_id": ("media-source://media_source/local/test.mp4"),
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
},
|
||||
|
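The repeated "can_search": False entries in these expected payloads reflect a new search capability flag on browse results. A short sketch of how such a payload is produced; the can_search keyword is an assumption mirroring the serialized field, so check the media_player BrowseMedia model for the exact signature:

# Illustrative only: building a browse node whose serialized form carries the
# new can_search flag seen in the test payloads above.
from homeassistant.components.media_player import BrowseMedia, MediaClass

root = BrowseMedia(
    media_class=MediaClass.DIRECTORY,
    media_content_id="media-source://media_source/local/.",
    media_content_type="library",
    title="media",
    can_play=False,
    can_expand=True,
    can_search=False,  # assumed keyword, matching the serialized field above
)
payload = root.as_dict()  # includes "can_search" alongside can_play/can_expand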
@ -4,6 +4,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'directory',
|
||||
'media_content_id': '',
|
||||
@ -18,6 +19,7 @@
|
||||
dict({
|
||||
'can_expand': False,
|
||||
'can_play': True,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'music',
|
||||
'media_content_id': '1',
|
||||
@ -28,6 +30,7 @@
|
||||
dict({
|
||||
'can_expand': False,
|
||||
'can_play': True,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'music',
|
||||
'media_content_id': '2',
|
||||
|
@ -1037,6 +1037,7 @@ async def test_entity_browse_media(
|
||||
),
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -1049,6 +1050,7 @@ async def test_entity_browse_media(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -1107,6 +1109,7 @@ async def test_entity_browse_media_audio_only(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -2208,6 +2211,7 @@ async def test_cast_platform_browse_media(
|
||||
"media_content_id": "",
|
||||
"can_play": False,
|
||||
"can_expand": True,
|
||||
"can_search": False,
|
||||
"thumbnail": "https://brands.home-assistant.io/_/spotify/logo.png",
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -2232,6 +2236,7 @@ async def test_cast_platform_browse_media(
|
||||
"media_content_id": "",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"children_media_class": None,
|
||||
"thumbnail": None,
|
||||
"children": [],
|
||||
|
@ -64,6 +64,7 @@ class MockDevice(Device):
|
||||
return_value=FIRMWARE_UPDATE_AVAILABLE
|
||||
)
|
||||
self.device.async_get_led_setting = AsyncMock(return_value=False)
|
||||
self.device.async_set_led_setting = AsyncMock(return_value=True)
|
||||
self.device.async_restart = AsyncMock(return_value=True)
|
||||
self.device.async_uptime = AsyncMock(return_value=UPTIME)
|
||||
self.device.async_start_wps = AsyncMock(return_value=True)
|
||||
@ -71,6 +72,7 @@ class MockDevice(Device):
|
||||
return_value=CONNECTED_STATIONS
|
||||
)
|
||||
self.device.async_get_wifi_guest_access = AsyncMock(return_value=GUEST_WIFI)
|
||||
self.device.async_set_wifi_guest_access = AsyncMock(return_value=True)
|
||||
self.device.async_get_wifi_neighbor_access_points = AsyncMock(
|
||||
return_value=NEIGHBOR_ACCESS_POINTS
|
||||
)
|
||||
|
@ -3,7 +3,6 @@
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'band': '5 GHz',
|
||||
'icon': 'mdi:lan-connect',
|
||||
'mac': 'AA:BB:CC:DD:EE:FF',
|
||||
'source_type': <SourceType.ROUTER: 'router'>,
|
||||
'wifi': 'Main',
|
||||
|
@ -4,6 +4,7 @@ from unittest.mock import AsyncMock
|
||||
|
||||
from devolo_plc_api.exceptions.device import DeviceUnavailable
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.device_tracker import DOMAIN as PLATFORM
|
||||
@ -25,6 +26,7 @@ STATION = CONNECTED_STATIONS[0]
|
||||
SERIAL = DISCOVERY_INFO.properties["SN"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
|
||||
async def test_device_tracker(
|
||||
hass: HomeAssistant,
|
||||
mock_device: MockDevice,
|
||||
@ -42,14 +44,6 @@ async def test_device_tracker(
|
||||
freezer.tick(LONG_UPDATE_INTERVAL)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Enable entity
|
||||
entity_registry.async_update_entity(state_key, disabled_by=None)
|
||||
await hass.async_block_till_done()
|
||||
freezer.tick(LONG_UPDATE_INTERVAL)
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert hass.states.get(state_key) == snapshot
|
||||
|
||||
# Emulate state change
|
||||
|
@ -15,7 +15,7 @@ from homeassistant.components.sensor import DOMAIN as SENSOR
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH
|
||||
from homeassistant.components.update import DOMAIN as UPDATE
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import CONF_IP_ADDRESS, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.entity_platform import async_get_platforms
|
||||
@ -24,8 +24,6 @@ from . import configure_integration
|
||||
from .const import IP
|
||||
from .mock import MockDevice
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"device", ["mock_device", "mock_repeater_device", "mock_ipv6_device"]
|
||||
@ -50,27 +48,6 @@ async def test_setup_entry(
|
||||
assert device_info == snapshot
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_device")
|
||||
async def test_setup_without_password(hass: HomeAssistant) -> None:
|
||||
"""Test setup entry without a device password set like used before HA Core 2022.06."""
|
||||
config = {
|
||||
CONF_IP_ADDRESS: IP,
|
||||
}
|
||||
entry = MockConfigEntry(domain=DOMAIN, data=config)
|
||||
entry.add_to_hass(hass)
|
||||
# Patching async_forward_entry_setup* is not advisable, and should be refactored
|
||||
# in the future.
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.config_entries.ConfigEntries.async_forward_entry_setups",
|
||||
return_value=True,
|
||||
),
|
||||
patch("homeassistant.core.EventBus.async_listen_once"),
|
||||
):
|
||||
assert await hass.config_entries.async_setup(entry.entry_id)
|
||||
assert entry.state is ConfigEntryState.LOADED
|
||||
|
||||
|
||||
async def test_setup_device_not_found(hass: HomeAssistant) -> None:
|
||||
"""Test setup entry."""
|
||||
entry = configure_integration(hass)
|
||||
|
@ -1,7 +1,7 @@
|
||||
"""Tests for the devolo Home Network switch."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import AsyncMock, patch
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
from devolo_plc_api.device_api import WifiGuestAccessGet
|
||||
from devolo_plc_api.exceptions.device import DevicePasswordProtected, DeviceUnavailable
|
||||
@ -16,6 +16,7 @@ from homeassistant.components.devolo_home_network.const import (
|
||||
from homeassistant.components.switch import DOMAIN as PLATFORM
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
SERVICE_TURN_OFF,
|
||||
SERVICE_TURN_ON,
|
||||
STATE_OFF,
|
||||
@ -106,18 +107,15 @@ async def test_update_enable_guest_wifi(
|
||||
mock_device.device.async_get_wifi_guest_access.return_value = WifiGuestAccessGet(
|
||||
enabled=False
|
||||
)
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_wifi_guest_access",
|
||||
new=AsyncMock(),
|
||||
) as turn_off:
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_OFF, {"entity_id": state_key}, blocking=True
|
||||
)
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_OFF
|
||||
turn_off.assert_called_once_with(False)
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_OFF
|
||||
mock_device.device.async_set_wifi_guest_access.assert_called_once_with(False)
|
||||
mock_device.device.async_set_wifi_guest_access.reset_mock()
|
||||
|
||||
freezer.tick(REQUEST_REFRESH_DEFAULT_COOLDOWN)
|
||||
async_fire_time_changed(hass)
|
||||
@ -127,18 +125,15 @@ async def test_update_enable_guest_wifi(
|
||||
mock_device.device.async_get_wifi_guest_access.return_value = WifiGuestAccessGet(
|
||||
enabled=True
|
||||
)
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_wifi_guest_access",
|
||||
new=AsyncMock(),
|
||||
) as turn_on:
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {"entity_id": state_key}, blocking=True
|
||||
)
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_ON
|
||||
turn_on.assert_called_once_with(True)
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_ON
|
||||
mock_device.device.async_set_wifi_guest_access.assert_called_once_with(True)
|
||||
mock_device.device.async_set_wifi_guest_access.reset_mock()
|
||||
|
||||
freezer.tick(REQUEST_REFRESH_DEFAULT_COOLDOWN)
|
||||
async_fire_time_changed(hass)
|
||||
@ -146,17 +141,17 @@ async def test_update_enable_guest_wifi(
|
||||
|
||||
# Device unavailable
|
||||
mock_device.device.async_get_wifi_guest_access.side_effect = DeviceUnavailable()
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_wifi_guest_access",
|
||||
side_effect=DeviceUnavailable,
|
||||
mock_device.device.async_set_wifi_guest_access.side_effect = DeviceUnavailable()
|
||||
|
||||
with pytest.raises(
|
||||
HomeAssistantError, match=f"Device {entry.title} did not respond"
|
||||
):
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {"entity_id": state_key}, blocking=True
|
||||
PLATFORM, SERVICE_TURN_ON, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
|
||||
await hass.config_entries.async_unload(entry.entry_id)
|
||||
|
||||
@ -191,18 +186,15 @@ async def test_update_enable_leds(
|
||||
|
||||
# Switch off
|
||||
mock_device.device.async_get_led_setting.return_value = False
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_led_setting",
|
||||
new=AsyncMock(),
|
||||
) as turn_off:
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_OFF, {"entity_id": state_key}, blocking=True
|
||||
)
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_OFF
|
||||
turn_off.assert_called_once_with(False)
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_OFF
|
||||
mock_device.device.async_set_led_setting.assert_called_once_with(False)
|
||||
mock_device.device.async_set_led_setting.reset_mock()
|
||||
|
||||
freezer.tick(REQUEST_REFRESH_DEFAULT_COOLDOWN)
|
||||
async_fire_time_changed(hass)
|
||||
@ -210,18 +202,15 @@ async def test_update_enable_leds(
|
||||
|
||||
# Switch on
|
||||
mock_device.device.async_get_led_setting.return_value = True
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_led_setting",
|
||||
new=AsyncMock(),
|
||||
) as turn_on:
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {"entity_id": state_key}, blocking=True
|
||||
)
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_ON
|
||||
turn_on.assert_called_once_with(True)
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_ON
|
||||
mock_device.device.async_set_led_setting.assert_called_once_with(True)
|
||||
mock_device.device.async_set_led_setting.reset_mock()
|
||||
|
||||
freezer.tick(REQUEST_REFRESH_DEFAULT_COOLDOWN)
|
||||
async_fire_time_changed(hass)
|
||||
@ -229,17 +218,17 @@ async def test_update_enable_leds(
|
||||
|
||||
# Device unavailable
|
||||
mock_device.device.async_get_led_setting.side_effect = DeviceUnavailable()
|
||||
with patch(
|
||||
"devolo_plc_api.device_api.deviceapi.DeviceApi.async_set_led_setting",
|
||||
side_effect=DeviceUnavailable,
|
||||
mock_device.device.async_set_led_setting.side_effect = DeviceUnavailable()
|
||||
|
||||
with pytest.raises(
|
||||
HomeAssistantError, match=f"Device {entry.title} did not respond"
|
||||
):
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_OFF, {"entity_id": state_key}, blocking=True
|
||||
PLATFORM, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
state = hass.states.get(state_key)
|
||||
assert state is not None
|
||||
assert state.state == STATE_UNAVAILABLE
|
||||
|
||||
await hass.config_entries.async_unload(entry.entry_id)
|
||||
|
||||
@ -308,7 +297,7 @@ async def test_auth_failed(
|
||||
|
||||
with pytest.raises(HomeAssistantError):
|
||||
await hass.services.async_call(
|
||||
PLATFORM, SERVICE_TURN_ON, {"entity_id": state_key}, blocking=True
|
||||
PLATFORM, SERVICE_TURN_ON, {ATTR_ENTITY_ID: state_key}, blocking=True
|
||||
)
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
@ -1058,6 +1058,7 @@ async def test_browse_media(
|
||||
),
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -1070,6 +1071,7 @@ async def test_browse_media(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -1153,6 +1155,7 @@ async def test_browse_media_unfiltered(
|
||||
),
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
@ -1163,6 +1166,7 @@ async def test_browse_media_unfiltered(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
|
@ -103,6 +103,7 @@ ENTITY_IDS_BY_NUMBER = {
|
||||
"26": "light.living_room_rgbww_lights",
|
||||
"27": "media_player.group",
|
||||
"28": "media_player.browse",
|
||||
"29": "media_player.search",
|
||||
}
|
||||
|
||||
ENTITY_NUMBERS_BY_ID = {v: k for k, v in ENTITY_IDS_BY_NUMBER.items()}
|
||||
|
@ -6,10 +6,16 @@ from unittest.mock import patch
|
||||
from aioesphomeapi import DeviceInfo, InvalidAuthAPIError
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.esphome import CONF_NOISE_PSK, coordinator, dashboard
|
||||
from homeassistant.components.esphome import (
|
||||
CONF_NOISE_PSK,
|
||||
DOMAIN,
|
||||
coordinator,
|
||||
dashboard,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
from . import VALID_NOISE_PSK
|
||||
|
||||
@ -34,7 +40,6 @@ async def test_dashboard_storage(
|
||||
|
||||
async def test_restore_dashboard_storage(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_storage: dict[str, Any],
|
||||
) -> None:
|
||||
"""Restore dashboard url and slug from storage."""
|
||||
@ -47,14 +52,13 @@ async def test_restore_dashboard_storage(
|
||||
with patch.object(
|
||||
dashboard, "async_get_or_create_dashboard_manager"
|
||||
) as mock_get_or_create:
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
assert mock_get_or_create.call_count == 1
|
||||
|
||||
|
||||
async def test_restore_dashboard_storage_end_to_end(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_storage: dict[str, Any],
|
||||
) -> None:
|
||||
"""Restore dashboard url and slug from storage."""
|
||||
@ -72,15 +76,13 @@ async def test_restore_dashboard_storage_end_to_end(
|
||||
"homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI"
|
||||
) as mock_dashboard_api,
|
||||
):
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
assert mock_dashboard_api.mock_calls[0][1][0] == "http://new-host:6052"
|
||||
|
||||
|
||||
async def test_restore_dashboard_storage_skipped_if_addon_uninstalled(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_storage: dict[str, Any],
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
@ -103,27 +105,25 @@ async def test_restore_dashboard_storage_skipped_if_addon_uninstalled(
|
||||
return_value={},
|
||||
),
|
||||
):
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await async_setup_component(hass, "hassio", {})
|
||||
await hass.async_block_till_done()
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
await hass.async_block_till_done() # wait for dashboard setup
|
||||
assert "test-slug is no longer installed" in caplog.text
|
||||
assert not mock_dashboard_api.called
|
||||
|
||||
|
||||
async def test_setup_dashboard_fails(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
hass_storage: dict[str, Any],
|
||||
) -> None:
|
||||
"""Test that nothing is stored on failed dashboard setup when there was no dashboard before."""
|
||||
with patch.object(
|
||||
coordinator.ESPHomeDashboardAPI, "get_devices", side_effect=TimeoutError
|
||||
) as mock_get_devices:
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id)
|
||||
await async_setup_component(hass, DOMAIN, {})
|
||||
await hass.async_block_till_done()
|
||||
await dashboard.async_set_dashboard_info(hass, "test-slug", "test-host", 6052)
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
assert mock_get_devices.call_count == 1
|
||||
|
||||
# The dashboard addon might recover later so we still
|
||||
|
@ -70,6 +70,11 @@ def mock_power_sensor() -> Mock:
|
||||
}
|
||||
sensor.actions = {}
|
||||
sensor.has_central_scene_event = False
|
||||
sensor.raw_data = {
|
||||
"fibaro_id": 1,
|
||||
"name": "Test sensor",
|
||||
"properties": {"power": 6.6, "password": "mysecret"},
|
||||
}
|
||||
value_mock = Mock()
|
||||
value_mock.has_value = False
|
||||
value_mock.is_bool_value = False
|
||||
@ -123,6 +128,7 @@ def mock_light() -> Mock:
|
||||
light.properties = {"manufacturer": ""}
|
||||
light.actions = {"setValue": 1, "on": 0, "off": 0}
|
||||
light.supported_features = {}
|
||||
light.raw_data = {"fibaro_id": 3, "name": "Test light", "properties": {"value": 20}}
|
||||
value_mock = Mock()
|
||||
value_mock.has_value = True
|
||||
value_mock.int_value.return_value = 20
|
||||
|
57
tests/components/fibaro/snapshots/test_diagnostics.ambr
Normal file
57
tests/components/fibaro/snapshots/test_diagnostics.ambr
Normal file
@ -0,0 +1,57 @@
|
||||
# serializer version: 1
|
||||
# name: test_config_entry_diagnostics
|
||||
dict({
|
||||
'config': dict({
|
||||
'import_plugins': True,
|
||||
}),
|
||||
'fibaro_devices': list([
|
||||
dict({
|
||||
'fibaro_id': 3,
|
||||
'name': 'Test light',
|
||||
'properties': dict({
|
||||
'value': 20,
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
})
|
||||
# ---
|
||||
# name: test_device_diagnostics
|
||||
dict({
|
||||
'config': dict({
|
||||
'import_plugins': True,
|
||||
}),
|
||||
'fibaro_devices': list([
|
||||
dict({
|
||||
'fibaro_id': 3,
|
||||
'name': 'Test light',
|
||||
'properties': dict({
|
||||
'value': 20,
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
})
|
||||
# ---
|
||||
# name: test_device_diagnostics_for_hub
|
||||
dict({
|
||||
'config': dict({
|
||||
'import_plugins': True,
|
||||
}),
|
||||
'fibaro_devices': list([
|
||||
dict({
|
||||
'fibaro_id': 3,
|
||||
'name': 'Test light',
|
||||
'properties': dict({
|
||||
'value': 20,
|
||||
}),
|
||||
}),
|
||||
dict({
|
||||
'fibaro_id': 1,
|
||||
'name': 'Test sensor',
|
||||
'properties': dict({
|
||||
'password': '**REDACTED**',
|
||||
'power': 6.6,
|
||||
}),
|
||||
}),
|
||||
]),
|
||||
})
|
||||
# ---
|
96
tests/components/fibaro/test_diagnostics.py
Normal file
@ -0,0 +1,96 @@
|
||||
"""Tests for the diagnostics data provided by the fibaro integration."""
|
||||
|
||||
from unittest.mock import Mock
|
||||
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.fibaro import DOMAIN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
|
||||
from .conftest import TEST_SERIALNUMBER, init_integration
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
from tests.components.diagnostics import (
|
||||
get_diagnostics_for_config_entry,
|
||||
get_diagnostics_for_device,
|
||||
)
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
|
||||
async def test_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_fibaro_client: Mock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_light: Mock,
|
||||
mock_room: Mock,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test diagnostics."""
|
||||
|
||||
# Arrange
|
||||
mock_fibaro_client.read_rooms.return_value = [mock_room]
|
||||
mock_fibaro_client.read_devices.return_value = [mock_light]
|
||||
# Act
|
||||
await init_integration(hass, mock_config_entry)
|
||||
# Assert
|
||||
assert (
|
||||
await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry)
|
||||
== snapshot
|
||||
)
|
||||
|
||||
|
||||
async def test_device_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_fibaro_client: Mock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_light: Mock,
|
||||
mock_room: Mock,
|
||||
entity_registry: er.EntityRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test diagnostics."""
|
||||
|
||||
# Arrange
|
||||
mock_fibaro_client.read_rooms.return_value = [mock_room]
|
||||
mock_fibaro_client.read_devices.return_value = [mock_light]
|
||||
# Act
|
||||
await init_integration(hass, mock_config_entry)
|
||||
entry = entity_registry.async_get("light.room_1_test_light_3")
|
||||
device = device_registry.async_get(entry.device_id)
|
||||
# Assert
|
||||
assert device
|
||||
assert (
|
||||
await get_diagnostics_for_device(hass, hass_client, mock_config_entry, device)
|
||||
== snapshot
|
||||
)
|
||||
|
||||
|
||||
async def test_device_diagnostics_for_hub(
|
||||
hass: HomeAssistant,
|
||||
hass_client: ClientSessionGenerator,
|
||||
mock_fibaro_client: Mock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_light: Mock,
|
||||
mock_power_sensor: Mock,
|
||||
mock_room: Mock,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Test diagnostics for the hub."""
|
||||
|
||||
# Arrange
|
||||
mock_fibaro_client.read_rooms.return_value = [mock_room]
|
||||
mock_fibaro_client.read_devices.return_value = [mock_light, mock_power_sensor]
|
||||
# Act
|
||||
await init_integration(hass, mock_config_entry)
|
||||
device = device_registry.async_get_device({(DOMAIN, TEST_SERIALNUMBER)})
|
||||
# Assert
|
||||
assert device
|
||||
assert (
|
||||
await get_diagnostics_for_device(hass, hass_client, mock_config_entry, device)
|
||||
== snapshot
|
||||
)
|
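The new test file and snapshots above exercise config-entry and device diagnostics, with the sensor's "password" property shown as **REDACTED**. A hedged sketch of the shape of diagnostics platform that would produce such output; this is not the actual fibaro module, and the controller helpers are assumptions:

# Illustrative sketch of a diagnostics platform shaped like the snapshots above:
# config data plus raw device payloads, with sensitive keys redacted.
from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

TO_REDACT = {"password"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    controller = entry.runtime_data  # assumed storage of the controller object
    return {
        "config": dict(entry.data),  # real code would likely redact this too
        "fibaro_devices": [
            async_redact_data(device.raw_data, TO_REDACT)
            for device in controller.get_all_devices()  # assumed helper
        ],
    }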
@ -184,6 +184,7 @@ async def test_browse_media(
|
||||
"media_content_id": "media-source://media_source/local/test.mp3",
|
||||
"can_play": True,
|
||||
"can_expand": False,
|
||||
"can_search": False,
|
||||
"thumbnail": None,
|
||||
"children_media_class": None,
|
||||
}
|
||||
|
@ -259,6 +259,13 @@ DEMO_DEVICES = [
|
||||
"type": "action.devices.types.SETTOP",
|
||||
"willReportState": False,
|
||||
},
|
||||
{
|
||||
"id": "media_player.search",
|
||||
"name": {"name": "Search"},
|
||||
"traits": ["action.devices.traits.MediaState", "action.devices.traits.OnOff"],
|
||||
"type": "action.devices.types.SETTOP",
|
||||
"willReportState": False,
|
||||
},
|
||||
{
|
||||
"id": "fan.living_room_fan",
|
||||
"name": {"name": "Living Room Fan"},
|
||||
|
@ -3,10 +3,12 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
dict({
|
||||
'can_expand': False,
|
||||
'can_play': True,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'track',
|
||||
'media_content_id': 'heos://media/1/station?name=Today%27s+Hits+Radio&image_url=&playable=True&browsable=False&media_id=123456789',
|
||||
@ -28,6 +30,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
]),
|
||||
'children_media_class': None,
|
||||
@ -43,10 +46,12 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
dict({
|
||||
'can_expand': False,
|
||||
'can_play': True,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'music',
|
||||
'media_content_id': 'media-source://media_source/local/test.mp3',
|
||||
@ -68,10 +73,12 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'directory',
|
||||
'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user',
|
||||
@ -82,6 +89,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'directory',
|
||||
'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False',
|
||||
@ -92,6 +100,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': 'music',
|
||||
'media_class': 'directory',
|
||||
'media_content_id': 'media-source://media_source/local/.',
|
||||
@ -113,10 +122,12 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'directory',
|
||||
'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user',
|
||||
@ -127,6 +138,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children_media_class': None,
|
||||
'media_class': 'directory',
|
||||
'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False',
|
||||
@ -148,6 +160,7 @@
|
||||
dict({
|
||||
'can_expand': True,
|
||||
'can_play': False,
|
||||
'can_search': False,
|
||||
'children': list([
|
||||
]),
|
||||
'children_media_class': 'directory',
|
||||
|