mirror of
https://github.com/home-assistant/core.git
synced 2025-11-07 01:50:18 +00:00
Compare commits
109 Commits
cursor/add
...
front_end_
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b70bd8402e | ||
|
|
65960aa3f7 | ||
|
|
a25afe2834 | ||
|
|
4cdfa3bddb | ||
|
|
9e7bef9fa7 | ||
|
|
68a1b1f91f | ||
|
|
1659ca532d | ||
|
|
8ea16daae4 | ||
|
|
5bd89acf9a | ||
|
|
2b8db74be4 | ||
|
|
d7f9a7114d | ||
|
|
f7a59eb86e | ||
|
|
37eef965ad | ||
|
|
b706430e66 | ||
|
|
5012aa5cb0 | ||
|
|
1c5f7adf4e | ||
|
|
ff364e3913 | ||
|
|
0e2a4605ff | ||
|
|
ca5b9ce0d3 | ||
|
|
953196ec21 | ||
|
|
b5be3d5ac3 | ||
|
|
5d9e8287d3 | ||
|
|
dc291708ae | ||
|
|
257e82fe4e | ||
|
|
ab6d4d645e | ||
|
|
58ebd84326 | ||
|
|
76b24dafed | ||
|
|
431f563ff6 | ||
|
|
e308e610c6 | ||
|
|
5e77cbd185 | ||
|
|
2dbc7ff4b7 | ||
|
|
49a6c5776d | ||
|
|
98f6001c9c | ||
|
|
ce38a93177 | ||
|
|
92fbf468f2 | ||
|
|
e09ec4a6f3 | ||
|
|
db63e0c829 | ||
|
|
8ed88d4a58 | ||
|
|
d098ada777 | ||
|
|
1add999c5a | ||
|
|
fad217837f | ||
|
|
983af1af7b | ||
|
|
bcf2c4e9b6 | ||
|
|
c72f2fd546 | ||
|
|
f54864a476 | ||
|
|
fe1ff456c6 | ||
|
|
ec25ead5ac | ||
|
|
e8277cb67c | ||
|
|
da0fb37a20 | ||
|
|
28675eee33 | ||
|
|
84561cbc41 | ||
|
|
4e48c881aa | ||
|
|
af8cd0414b | ||
|
|
f54076da29 | ||
|
|
1d0eb97592 | ||
|
|
57f1c268ef | ||
|
|
01402e4f96 | ||
|
|
6137a643d8 | ||
|
|
1badfe3aff | ||
|
|
a549104fe1 | ||
|
|
2aab2ddc55 | ||
|
|
42e01362a5 | ||
|
|
c3cf24ba25 | ||
|
|
7809fb6a9b | ||
|
|
144fc2a443 | ||
|
|
c67e005b2c | ||
|
|
1c6913eec2 | ||
|
|
fb5c4a1375 | ||
|
|
60b8392478 | ||
|
|
7145fb96dd | ||
|
|
37d94aca6d | ||
|
|
9b697edfca | ||
|
|
22e30be946 | ||
|
|
bc9d35b85f | ||
|
|
4dfb6e4983 | ||
|
|
09d78ab5ad | ||
|
|
b2ebdb7ef0 | ||
|
|
83d6a30b2e | ||
|
|
19dee6d22a | ||
|
|
afd27630fb | ||
|
|
cad1f1da1d | ||
|
|
cd62bd86fd | ||
|
|
79c3bc9eca | ||
|
|
10439eea4b | ||
|
|
75cc866e72 | ||
|
|
8b2ca6c571 | ||
|
|
52db73e8e3 | ||
|
|
79d15ec91c | ||
|
|
5af91df2b9 | ||
|
|
89a85c3d8c | ||
|
|
e44c6391b1 | ||
|
|
99d3234855 | ||
|
|
32cc5123f5 | ||
|
|
93415175bb | ||
|
|
f04bb69dbc | ||
|
|
9f8c9940bd | ||
|
|
496f527dff | ||
|
|
385e6f58a8 | ||
|
|
c8c37ad628 | ||
|
|
cc57732e24 | ||
|
|
6011df8952 | ||
|
|
08e494aba5 | ||
|
|
77c428e4c7 | ||
|
|
c22a2b93fa | ||
|
|
7f84363bf4 | ||
|
|
0980c3a270 | ||
|
|
7cec3aa27c | ||
|
|
1ddb39f6d0 | ||
|
|
10d2e38315 |
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
|
||||
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
|
||||
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
@@ -362,6 +362,7 @@ homeassistant.components.myuplink.*
|
||||
homeassistant.components.nam.*
|
||||
homeassistant.components.nanoleaf.*
|
||||
homeassistant.components.nasweb.*
|
||||
homeassistant.components.neato.*
|
||||
homeassistant.components.nest.*
|
||||
homeassistant.components.netatmo.*
|
||||
homeassistant.components.network.*
|
||||
|
||||
4
CODEOWNERS
generated
4
CODEOWNERS
generated
@@ -1817,8 +1817,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/wyoming/ @synesthesiam
|
||||
/tests/components/wyoming/ @synesthesiam
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
|
||||
/tests/components/xbox/ @hunterjm @tr4nt0r
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
{
|
||||
"domain": "yale",
|
||||
"name": "Yale",
|
||||
"integrations": [
|
||||
"august",
|
||||
"yale_smart_alarm",
|
||||
"yalexs_ble",
|
||||
"yale_home",
|
||||
"yale"
|
||||
]
|
||||
"name": "Yale (non-US/Canada)",
|
||||
"integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
|
||||
}
|
||||
|
||||
5
homeassistant/brands/yale_august.json
Normal file
5
homeassistant/brands/yale_august.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "yale_august",
|
||||
"name": "Yale August (US/Canada)",
|
||||
"integrations": ["august", "august_ble"]
|
||||
}
|
||||
@@ -30,6 +30,7 @@ generate_data:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
multiple: true
|
||||
generate_image:
|
||||
fields:
|
||||
task_name:
|
||||
@@ -57,3 +58,4 @@ generate_image:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
multiple: true
|
||||
|
||||
@@ -58,7 +58,10 @@ from homeassistant.const import (
|
||||
from homeassistant.helpers import network
|
||||
from homeassistant.util import color as color_util, dt as dt_util
|
||||
from homeassistant.util.decorator import Registry
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
from homeassistant.util.unit_conversion import (
|
||||
TemperatureConverter,
|
||||
TemperatureDeltaConverter,
|
||||
)
|
||||
|
||||
from .config import AbstractConfig
|
||||
from .const import (
|
||||
@@ -844,7 +847,7 @@ def temperature_from_object(
|
||||
temp -= 273.15
|
||||
|
||||
if interval:
|
||||
return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
|
||||
return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
|
||||
return TemperatureConverter.convert(temp, from_unit, to_unit)
|
||||
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==6.5.5"]
|
||||
"requirements": ["aioamazondevices==6.5.6"]
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="daily_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
key=TYPE_LIGHTNING_PER_DAY,
|
||||
translation_key="lightning_strikes_per_day",
|
||||
native_unit_of_measurement="strikes",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="monthly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="weekly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="yearly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
|
||||
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
"""The blueprint integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import websocket_api
|
||||
@@ -30,7 +28,4 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the blueprint integration."""
|
||||
websocket_api.async_setup(hass)
|
||||
hass.async_create_task(
|
||||
async_load_platform(hass, Platform.UPDATE, DOMAIN, None, config)
|
||||
)
|
||||
return True
|
||||
|
||||
@@ -204,8 +204,8 @@ class DomainBlueprints:
|
||||
self.hass = hass
|
||||
self.domain = domain
|
||||
self.logger = logger
|
||||
self.blueprint_in_use = blueprint_in_use
|
||||
self.reload_blueprint_consumers = reload_blueprint_consumers
|
||||
self._blueprint_in_use = blueprint_in_use
|
||||
self._reload_blueprint_consumers = reload_blueprint_consumers
|
||||
self._blueprints: dict[str, Blueprint | None] = {}
|
||||
self._load_lock = asyncio.Lock()
|
||||
self._blueprint_schema = blueprint_schema
|
||||
@@ -325,7 +325,7 @@ class DomainBlueprints:
|
||||
|
||||
async def async_remove_blueprint(self, blueprint_path: str) -> None:
|
||||
"""Remove a blueprint file."""
|
||||
if self.blueprint_in_use(self.hass, blueprint_path):
|
||||
if self._blueprint_in_use(self.hass, blueprint_path):
|
||||
raise BlueprintInUse(self.domain, blueprint_path)
|
||||
path = self.blueprint_folder / blueprint_path
|
||||
await self.hass.async_add_executor_job(path.unlink)
|
||||
@@ -362,7 +362,7 @@ class DomainBlueprints:
|
||||
self._blueprints[blueprint_path] = blueprint
|
||||
|
||||
if overrides_existing:
|
||||
await self.reload_blueprint_consumers(self.hass, blueprint_path)
|
||||
await self._reload_blueprint_consumers(self.hass, blueprint_path)
|
||||
|
||||
return overrides_existing
|
||||
|
||||
|
||||
@@ -1,293 +0,0 @@
|
||||
"""Update entities for blueprints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Any, Final
|
||||
|
||||
from homeassistant.components import automation, script
|
||||
from . import importer, models
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.const import CONF_SOURCE_URL
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import event as event_helper
|
||||
from homeassistant.helpers.entity import EntityCategory
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DOMAIN as BLUEPRINT_DOMAIN
|
||||
from .errors import BlueprintException
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_LATEST_VERSION_PLACEHOLDER: Final = "remote"
|
||||
DATA_UPDATE_MANAGER: Final = "update_manager"
|
||||
REFRESH_INTERVAL: Final = timedelta(days=1)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class BlueprintUsage:
|
||||
"""Details about a blueprint currently in use."""
|
||||
|
||||
domain: str
|
||||
path: str
|
||||
domain_blueprints: models.DomainBlueprints
|
||||
blueprint: models.Blueprint
|
||||
entities: list[str]
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the blueprint update platform."""
|
||||
data = hass.data.setdefault(BLUEPRINT_DOMAIN, {})
|
||||
|
||||
if (manager := data.get(DATA_UPDATE_MANAGER)) is None:
|
||||
manager = BlueprintUpdateManager(hass, async_add_entities)
|
||||
data[DATA_UPDATE_MANAGER] = manager
|
||||
await manager.async_start()
|
||||
return
|
||||
|
||||
manager.replace_add_entities(async_add_entities)
|
||||
await manager.async_recreate_entities()
|
||||
|
||||
|
||||
class BlueprintUpdateManager:
|
||||
"""Manage blueprint update entities based on blueprint usage."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Initialize the manager."""
|
||||
self.hass = hass
|
||||
self._async_add_entities = async_add_entities
|
||||
self._entities: dict[tuple[str, str], BlueprintUpdateEntity] = {}
|
||||
self._lock = asyncio.Lock()
|
||||
self._refresh_cancel: CALLBACK_TYPE | None = None
|
||||
self._started = False
|
||||
self._interval_unsub: CALLBACK_TYPE | None = None
|
||||
|
||||
async def async_start(self) -> None:
|
||||
"""Start tracking blueprint usage."""
|
||||
if self._started:
|
||||
return
|
||||
self._started = True
|
||||
|
||||
self._interval_unsub = event_helper.async_track_time_interval(
|
||||
self.hass, self._handle_time_interval, REFRESH_INTERVAL
|
||||
)
|
||||
await self.async_refresh_entities()
|
||||
|
||||
def replace_add_entities(self, async_add_entities: AddEntitiesCallback) -> None:
|
||||
"""Update the callback used to register entities."""
|
||||
self._async_add_entities = async_add_entities
|
||||
|
||||
async def async_recreate_entities(self) -> None:
|
||||
"""Recreate entities after the platform has been reloaded."""
|
||||
async with self._lock:
|
||||
entities = list(self._entities.values())
|
||||
self._entities.clear()
|
||||
|
||||
for entity in entities:
|
||||
await entity.async_remove()
|
||||
|
||||
await self.async_refresh_entities()
|
||||
|
||||
async def async_refresh_entities(self) -> None:
|
||||
"""Refresh update entities based on current blueprint usage."""
|
||||
async with self._lock:
|
||||
usage_map = await self._async_collect_in_use_blueprints()
|
||||
|
||||
current_keys = set(self._entities)
|
||||
new_keys = set(usage_map)
|
||||
|
||||
for key in current_keys - new_keys:
|
||||
entity = self._entities.pop(key)
|
||||
await entity.async_remove()
|
||||
|
||||
new_entities: list[BlueprintUpdateEntity] = []
|
||||
|
||||
for key in new_keys - current_keys:
|
||||
usage = usage_map[key]
|
||||
entity = BlueprintUpdateEntity(self, usage)
|
||||
self._entities[key] = entity
|
||||
new_entities.append(entity)
|
||||
|
||||
for key in new_keys & current_keys:
|
||||
self._entities[key].update_usage(usage_map[key])
|
||||
self._entities[key].async_write_ha_state()
|
||||
|
||||
if new_entities:
|
||||
self._async_add_entities(new_entities)
|
||||
|
||||
def async_schedule_refresh(self) -> None:
|
||||
"""Schedule an asynchronous refresh."""
|
||||
if self._refresh_cancel is not None:
|
||||
return
|
||||
|
||||
self._refresh_cancel = event_helper.async_call_later(
|
||||
self.hass, 0, self._handle_scheduled_refresh
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_scheduled_refresh(self, _now: Any) -> None:
|
||||
"""Run a scheduled refresh task."""
|
||||
self._refresh_cancel = None
|
||||
self.hass.async_create_task(self.async_refresh_entities())
|
||||
|
||||
@callback
|
||||
def _handle_time_interval(self, _now: Any) -> None:
|
||||
"""Handle scheduled interval refresh."""
|
||||
self.async_schedule_refresh()
|
||||
|
||||
async def _async_collect_in_use_blueprints(self) -> dict[tuple[str, str], BlueprintUsage]:
|
||||
"""Collect blueprint usage information for automations and scripts."""
|
||||
|
||||
usage_keys: set[tuple[str, str]] = set()
|
||||
|
||||
if automation.DATA_COMPONENT in self.hass.data:
|
||||
component = self.hass.data[automation.DATA_COMPONENT]
|
||||
for automation_entity in list(component.entities):
|
||||
if (path := getattr(automation_entity, "referenced_blueprint", None)):
|
||||
usage_keys.add((automation.DOMAIN, path))
|
||||
|
||||
if script.DOMAIN in self.hass.data:
|
||||
component = self.hass.data[script.DOMAIN]
|
||||
for script_entity in list(component.entities):
|
||||
if (path := getattr(script_entity, "referenced_blueprint", None)):
|
||||
usage_keys.add((script.DOMAIN, path))
|
||||
|
||||
domain_blueprints_map = self.hass.data.get(BLUEPRINT_DOMAIN, {})
|
||||
usage_map: dict[tuple[str, str], BlueprintUsage] = {}
|
||||
|
||||
for domain, path in usage_keys:
|
||||
domain_blueprints: models.DomainBlueprints | None = domain_blueprints_map.get(
|
||||
domain
|
||||
)
|
||||
|
||||
if domain_blueprints is None:
|
||||
continue
|
||||
|
||||
if not domain_blueprints.blueprint_in_use(self.hass, path):
|
||||
continue
|
||||
|
||||
try:
|
||||
blueprint = await domain_blueprints.async_get_blueprint(path)
|
||||
except BlueprintException:
|
||||
continue
|
||||
|
||||
source_url = blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
if not source_url:
|
||||
continue
|
||||
|
||||
if domain == automation.DOMAIN:
|
||||
entities = automation.automations_with_blueprint(self.hass, path)
|
||||
elif domain == script.DOMAIN:
|
||||
entities = script.scripts_with_blueprint(self.hass, path)
|
||||
else:
|
||||
entities = []
|
||||
|
||||
usage_map[(domain, path)] = BlueprintUsage(
|
||||
domain=domain,
|
||||
path=path,
|
||||
domain_blueprints=domain_blueprints,
|
||||
blueprint=blueprint,
|
||||
entities=entities,
|
||||
)
|
||||
|
||||
return usage_map
|
||||
|
||||
|
||||
class BlueprintUpdateEntity(UpdateEntity):
|
||||
"""Define a blueprint update entity."""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
_attr_supported_features = UpdateEntityFeature.INSTALL
|
||||
|
||||
def __init__(self, manager: BlueprintUpdateManager, usage: BlueprintUsage) -> None:
|
||||
"""Initialize the update entity."""
|
||||
self._manager = manager
|
||||
self._domain = usage.domain
|
||||
self._path = usage.path
|
||||
self._domain_blueprints = usage.domain_blueprints
|
||||
self._blueprint = usage.blueprint
|
||||
self._entities_in_use = usage.entities
|
||||
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
self._attr_unique_id = f"{self._domain}:{self._path}"
|
||||
self._attr_in_progress = False
|
||||
|
||||
self.update_usage(usage)
|
||||
|
||||
@callback
|
||||
def update_usage(self, usage: BlueprintUsage) -> None:
|
||||
"""Update the entity with latest usage information."""
|
||||
self._domain_blueprints = usage.domain_blueprints
|
||||
self._blueprint = usage.blueprint
|
||||
self._entities_in_use = usage.entities
|
||||
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
|
||||
self._attr_name = usage.blueprint.name
|
||||
self._attr_release_summary = usage.blueprint.metadata.get("description")
|
||||
self._attr_installed_version = usage.blueprint.metadata.get("version")
|
||||
self._attr_release_url = self._source_url
|
||||
self._attr_available = self._source_url is not None
|
||||
self._attr_latest_version = (
|
||||
_LATEST_VERSION_PLACEHOLDER
|
||||
if self._source_url is not None
|
||||
else self._attr_installed_version
|
||||
)
|
||||
|
||||
async def async_install(self, version: str | None, backup: bool) -> None:
|
||||
"""Install (refresh) the blueprint from its source."""
|
||||
if self._source_url is None:
|
||||
raise HomeAssistantError("Blueprint does not define a source URL")
|
||||
|
||||
self._attr_in_progress = True
|
||||
self.async_write_ha_state()
|
||||
usage: BlueprintUsage | None = None
|
||||
|
||||
try:
|
||||
imported = await importer.fetch_blueprint_from_url(
|
||||
self.hass, self._source_url
|
||||
)
|
||||
blueprint = imported.blueprint
|
||||
|
||||
if blueprint.domain != self._domain:
|
||||
raise HomeAssistantError(
|
||||
"Downloaded blueprint domain does not match the existing blueprint"
|
||||
)
|
||||
|
||||
await self._domain_blueprints.async_add_blueprint(
|
||||
blueprint, self._path, allow_override=True
|
||||
)
|
||||
|
||||
usage = BlueprintUsage(
|
||||
domain=self._domain,
|
||||
path=self._path,
|
||||
domain_blueprints=self._domain_blueprints,
|
||||
blueprint=blueprint,
|
||||
entities=self._entities_in_use,
|
||||
)
|
||||
|
||||
except HomeAssistantError:
|
||||
raise
|
||||
except Exception as err: # noqa: BLE001 - Provide context for unexpected errors
|
||||
raise HomeAssistantError("Failed to update blueprint from source") from err
|
||||
finally:
|
||||
self._attr_in_progress = False
|
||||
|
||||
if usage is not None:
|
||||
self.update_usage(usage)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._manager.async_schedule_refresh()
|
||||
@@ -13,6 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.util.network import is_host_valid
|
||||
|
||||
@@ -21,6 +22,7 @@ from .const import (
|
||||
DEFAULT_COMMUNITY,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
PRINTER_TYPE_LASER,
|
||||
PRINTER_TYPES,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
@@ -28,7 +30,12 @@ from .const import (
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
@@ -42,7 +49,12 @@ DATA_SCHEMA = vol.Schema(
|
||||
)
|
||||
ZEROCONF_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
|
||||
@@ -7,7 +7,10 @@ from typing import Final
|
||||
|
||||
DOMAIN: Final = "brother"
|
||||
|
||||
PRINTER_TYPES: Final = ["laser", "ink"]
|
||||
PRINTER_TYPE_LASER = "laser"
|
||||
PRINTER_TYPE_INK = "ink"
|
||||
|
||||
PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
|
||||
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@@ -27,6 +27,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
ATTR_COUNTER = "counter"
|
||||
ATTR_REMAINING_PAGES = "remaining_pages"
|
||||
|
||||
|
||||
@@ -38,11 +38,11 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"type": "Type of the printer"
|
||||
"type": "Printer type"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of the Brother printer to control.",
|
||||
"type": "Brother printer type: ink or laser."
|
||||
"type": "The type of the Brother printer."
|
||||
},
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
@@ -210,5 +210,13 @@
|
||||
"update_error": {
|
||||
"message": "An error occurred while retrieving data from the {device} printer: {error}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"printer_type": {
|
||||
"options": {
|
||||
"ink": "ink",
|
||||
"laser": "laser"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
|
||||
return HVACAction.HEATING
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_parse_hvac_mode",
|
||||
translation_key="failed_to_parse_hvac_action",
|
||||
translation_placeholders={
|
||||
"mode_and_active": mode_and_active,
|
||||
"current_temperature": str(self.current_temperature),
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
"exceptions": {
|
||||
"failed_to_parse_hvac_action": {
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
|
||||
},
|
||||
"failed_to_parse_hvac_mode": {
|
||||
"message": "Cannot parse response to HVACMode: {mode}"
|
||||
|
||||
@@ -6,3 +6,5 @@ DEFAULT_PORT = 10102
|
||||
|
||||
CONF_SUPPORTED_MODES = "supported_modes"
|
||||
CONF_SWING_SUPPORT = "swing_support"
|
||||
MAX_RETRIES = 3
|
||||
BACKOFF_BASE_DELAY = 2
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from pycoolmasternet_async import CoolMasterNet
|
||||
@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,7 +47,34 @@ class CoolmasterDataUpdateCoordinator(
|
||||
|
||||
async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
|
||||
"""Fetch data from Coolmaster."""
|
||||
try:
|
||||
return await self._coolmaster.status()
|
||||
except OSError as error:
|
||||
raise UpdateFailed from error
|
||||
retries_left = MAX_RETRIES
|
||||
status: dict[str, CoolMasterNetUnit] = {}
|
||||
while retries_left > 0 and not status:
|
||||
retries_left -= 1
|
||||
try:
|
||||
status = await self._coolmaster.status()
|
||||
except OSError as error:
|
||||
if retries_left == 0:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
|
||||
) from error
|
||||
_LOGGER.debug(
|
||||
"Error communicating with coolmaster (%d retries left): %s",
|
||||
retries_left,
|
||||
str(error),
|
||||
)
|
||||
else:
|
||||
if status:
|
||||
return status
|
||||
|
||||
_LOGGER.debug(
|
||||
"Error communicating with coolmaster: empty status received (%d retries left)",
|
||||
retries_left,
|
||||
)
|
||||
|
||||
backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
|
||||
await asyncio.sleep(backoff)
|
||||
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
|
||||
)
|
||||
|
||||
@@ -81,6 +81,9 @@
|
||||
"active_map": {
|
||||
"default": "mdi:floor-plan"
|
||||
},
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"water_amount": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
@@ -89,9 +92,6 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
|
||||
}
|
||||
|
||||
@@ -5,8 +5,9 @@ from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
|
||||
from deebot_client.command import CommandWithMessageHandling
|
||||
from deebot_client.device import Device
|
||||
from deebot_client.events import WorkModeEvent
|
||||
from deebot_client.events import WorkModeEvent, auto_empty
|
||||
from deebot_client.events.base import Event
|
||||
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
|
||||
from deebot_client.events.water_info import WaterAmountEvent
|
||||
@@ -34,6 +35,9 @@ class EcovacsSelectEntityDescription[EventT: Event](
|
||||
|
||||
current_option_fn: Callable[[EventT], str | None]
|
||||
options_fn: Callable[[CapabilitySetTypes], list[str]]
|
||||
set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
|
||||
lambda cap, option: cap.set(option)
|
||||
)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
@@ -58,6 +62,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
|
||||
set_option_fn=lambda cap, option: cap.set(None, option),
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -106,14 +118,17 @@ class EcovacsSelectEntity[EventT: Event](
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def on_event(event: EventT) -> None:
|
||||
self._attr_current_option = self.entity_description.current_option_fn(event)
|
||||
self.async_write_ha_state()
|
||||
if (option := self.entity_description.current_option_fn(event)) is not None:
|
||||
self._attr_current_option = option
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._subscribe(self._capability.event, on_event)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self._device.execute_command(self._capability.set(option))
|
||||
await self._device.execute_command(
|
||||
self.entity_description.set_option_fn(self._capability, option)
|
||||
)
|
||||
|
||||
|
||||
class EcovacsActiveMapSelectEntity(
|
||||
|
||||
@@ -17,7 +17,6 @@ from deebot_client.events import (
|
||||
NetworkInfoEvent,
|
||||
StatsEvent,
|
||||
TotalStatsEvent,
|
||||
auto_empty,
|
||||
station,
|
||||
)
|
||||
from sucks import VacBot
|
||||
@@ -159,14 +158,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(station.State),
|
||||
),
|
||||
EcovacsSensorEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
value_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(auto_empty.Frequency),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -129,6 +129,16 @@
|
||||
"active_map": {
|
||||
"name": "Active map"
|
||||
},
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"min_10": "10 minutes",
|
||||
"min_15": "15 minutes",
|
||||
"min_25": "25 minutes",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
@@ -149,13 +159,6 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name": "Error",
|
||||
"state_attributes": {
|
||||
|
||||
@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return await self.async_step_discovered_connection()
|
||||
return await self.async_step_manual_connection()
|
||||
|
||||
current_unique_ids = self._async_current_ids()
|
||||
current_unique_ids = self._async_current_ids(include_ignore=False)
|
||||
current_hosts = {
|
||||
hostname_from_url(entry.data[CONF_HOST])
|
||||
for entry in self._async_current_entries(include_ignore=False)
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
"""Virtual integration: Enmax Energy."""
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"domain": "enmax",
|
||||
"name": "Enmax Energy",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "opower"
|
||||
}
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.5.0",
|
||||
"aioesphomeapi==42.6.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -77,7 +77,7 @@ class EufyLifeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data={CONF_MODEL: model},
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if (
|
||||
|
||||
@@ -129,6 +129,51 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconf_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input:
|
||||
try:
|
||||
await _validate_input(
|
||||
self.hass,
|
||||
data={
|
||||
**reconf_entry.data,
|
||||
**user_input,
|
||||
},
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except FireflyClientTimeout:
|
||||
errors["base"] = "timeout_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
|
||||
return self.async_update_reload_and_abort(
|
||||
reconf_entry,
|
||||
data_updates={
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
suggested_values=user_input or reconf_entry.data.copy(),
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -20,6 +21,20 @@
|
||||
},
|
||||
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::firefly_iii::config::step::user::data_description::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:component::firefly_iii::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Firefly III instance.",
|
||||
"title": "Reconfigure Firefly III Integration"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
|
||||
@@ -453,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.http.app.router.register_resource(IndexView(repo_path, hass))
|
||||
|
||||
async_register_built_in_panel(hass, "light")
|
||||
async_register_built_in_panel(hass, "safety")
|
||||
async_register_built_in_panel(hass, "security")
|
||||
async_register_built_in_panel(hass, "climate")
|
||||
|
||||
async_register_built_in_panel(hass, "profile")
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251029.1"]
|
||||
"requirements": ["home-assistant-frontend==20251103.0"]
|
||||
}
|
||||
|
||||
@@ -15,7 +15,9 @@ from homeassistant.helpers.storage import Store
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
DATA_STORAGE: HassKey[dict[str, UserStore]] = HassKey("frontend_storage")
|
||||
DATA_SYSTEM_STORAGE: HassKey[SystemStore] = HassKey("frontend_system_storage")
|
||||
STORAGE_VERSION_USER_DATA = 1
|
||||
STORAGE_VERSION_SYSTEM_DATA = 1
|
||||
|
||||
|
||||
async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
@@ -23,6 +25,9 @@ async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_set_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_user_data)
|
||||
websocket_api.async_register_command(hass, websocket_set_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_get_system_data)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_system_data)
|
||||
|
||||
|
||||
async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
|
||||
@@ -83,6 +88,62 @@ class _UserStore(Store[dict[str, Any]]):
|
||||
)
|
||||
|
||||
|
||||
async def async_system_store(hass: HomeAssistant) -> SystemStore:
|
||||
"""Access the system store."""
|
||||
if DATA_SYSTEM_STORAGE not in hass.data:
|
||||
store = hass.data[DATA_SYSTEM_STORAGE] = SystemStore(hass)
|
||||
await store.async_load()
|
||||
return hass.data[DATA_SYSTEM_STORAGE]
|
||||
|
||||
|
||||
class SystemStore:
|
||||
"""System store for frontend data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the system store."""
|
||||
self._store = _SystemStore(hass)
|
||||
self.data: dict[str, Any] = {}
|
||||
self.subscriptions: dict[str | None, list[Callable[[], None]]] = {}
|
||||
|
||||
async def async_load(self) -> None:
|
||||
"""Load the data from the store."""
|
||||
self.data = await self._store.async_load() or {}
|
||||
|
||||
async def async_set_item(self, key: str, value: Any) -> None:
|
||||
"""Set an item and save the store."""
|
||||
self.data[key] = value
|
||||
await self._store.async_save(self.data)
|
||||
for cb in self.subscriptions.get(None, []):
|
||||
cb()
|
||||
for cb in self.subscriptions.get(key, []):
|
||||
cb()
|
||||
|
||||
@callback
|
||||
def async_subscribe(
|
||||
self, key: str | None, on_update_callback: Callable[[], None]
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to store updates."""
|
||||
self.subscriptions.setdefault(key, []).append(on_update_callback)
|
||||
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from the store."""
|
||||
self.subscriptions[key].remove(on_update_callback)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
|
||||
class _SystemStore(Store[dict[str, Any]]):
|
||||
"""System store for frontend data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the system store."""
|
||||
super().__init__(
|
||||
hass,
|
||||
STORAGE_VERSION_SYSTEM_DATA,
|
||||
"frontend.system_data",
|
||||
)
|
||||
|
||||
|
||||
def with_user_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], UserStore],
|
||||
@@ -107,6 +168,28 @@ def with_user_store(
|
||||
return with_user_store_func
|
||||
|
||||
|
||||
def with_system_store(
|
||||
orig_func: Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any], SystemStore],
|
||||
Coroutine[Any, Any, None],
|
||||
],
|
||||
) -> Callable[
|
||||
[HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None]
|
||||
]:
|
||||
"""Decorate function to provide system store."""
|
||||
|
||||
@wraps(orig_func)
|
||||
async def with_system_store_func(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Provide system store to function."""
|
||||
store = await async_system_store(hass)
|
||||
|
||||
await orig_func(hass, connection, msg, store)
|
||||
|
||||
return with_system_store_func
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_user_data",
|
||||
@@ -169,3 +252,74 @@ async def websocket_subscribe_user_data(
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/set_system_data",
|
||||
vol.Required("key"): str,
|
||||
vol.Required("value"): vol.Any(bool, str, int, float, dict, list, None),
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_set_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle set system data command."""
|
||||
if not connection.user.is_admin:
|
||||
connection.send_error(msg["id"], "unauthorized", "Admin access required")
|
||||
return
|
||||
|
||||
await store.async_set_item(msg["key"], msg["value"])
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{vol.Required("type"): "frontend/get_system_data", vol.Optional("key"): str}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_get_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle get system data command."""
|
||||
data = store.data
|
||||
connection.send_result(
|
||||
msg["id"], {"value": data.get(msg["key"]) if "key" in msg else data}
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "frontend/subscribe_system_data",
|
||||
vol.Optional("key"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@with_system_store
|
||||
async def websocket_subscribe_system_data(
|
||||
hass: HomeAssistant,
|
||||
connection: ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
store: SystemStore,
|
||||
) -> None:
|
||||
"""Handle subscribe to system data command."""
|
||||
key: str | None = msg.get("key")
|
||||
|
||||
def on_data_update() -> None:
|
||||
"""Handle system data update."""
|
||||
data = store.data
|
||||
connection.send_event(
|
||||
msg["id"], {"value": data.get(key) if key is not None else data}
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
|
||||
on_data_update()
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
@@ -43,6 +43,9 @@ from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class GiosSensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
@@ -14,6 +14,10 @@
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"station_id": "Measuring station"
|
||||
},
|
||||
"data_description": {
|
||||
"name": "Config entry name, by default, this is the name of your Home Assistant instance.",
|
||||
"station_id": "The name of the measuring station where the environmental data is collected."
|
||||
},
|
||||
"title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import itertools
|
||||
|
||||
from aiohasupervisor.models.mounts import MountState
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -13,8 +16,14 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
|
||||
from .entity import HassioAddonEntity
|
||||
from .const import (
|
||||
ADDONS_COORDINATOR,
|
||||
ATTR_STARTED,
|
||||
ATTR_STATE,
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_MOUNTS,
|
||||
)
|
||||
from .entity import HassioAddonEntity, HassioMountEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -34,6 +43,16 @@ ADDON_ENTITY_DESCRIPTIONS = (
|
||||
),
|
||||
)
|
||||
|
||||
MOUNT_ENTITY_DESCRIPTIONS = (
|
||||
HassioBinarySensorEntityDescription(
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_registry_enabled_default=False,
|
||||
key=ATTR_STATE,
|
||||
translation_key="mount",
|
||||
target=MountState.ACTIVE.value,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -44,13 +63,26 @@ async def async_setup_entry(
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
itertools.chain(
|
||||
[
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
[
|
||||
HassioMountBinarySensor(
|
||||
mount=mount,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for mount in coordinator.data[DATA_KEY_MOUNTS].values()
|
||||
for entity_description in MOUNT_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -68,3 +100,20 @@ class HassioAddonBinarySensor(HassioAddonEntity, BinarySensorEntity):
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
|
||||
class HassioMountBinarySensor(HassioMountEntity, BinarySensorEntity):
|
||||
"""Binary sensor for Hass.io mount."""
|
||||
|
||||
entity_description: HassioBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
value = getattr(
|
||||
self.coordinator.data[DATA_KEY_MOUNTS][self._mount.name],
|
||||
self.entity_description.key,
|
||||
)
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
@@ -90,6 +90,7 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
|
||||
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
|
||||
DATA_ADDONS_INFO = "hassio_addons_info"
|
||||
DATA_ADDONS_STATS = "hassio_addons_stats"
|
||||
DATA_MOUNTS_INFO = "hassio_mounts_info"
|
||||
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
ATTR_AUTO_UPDATE = "auto_update"
|
||||
@@ -110,6 +111,7 @@ DATA_KEY_SUPERVISOR = "supervisor"
|
||||
DATA_KEY_CORE = "core"
|
||||
DATA_KEY_HOST = "host"
|
||||
DATA_KEY_SUPERVISOR_ISSUES = "supervisor_issues"
|
||||
DATA_KEY_MOUNTS = "mounts"
|
||||
|
||||
PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
|
||||
@@ -174,3 +176,4 @@ class SupervisorEntityModel(StrEnum):
|
||||
CORE = "Home Assistant Core"
|
||||
SUPERVISOR = "Home Assistant Supervisor"
|
||||
HOST = "Home Assistant Host"
|
||||
MOUNT = "Home Assistant Mount"
|
||||
|
||||
@@ -10,6 +10,11 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
|
||||
from aiohasupervisor.models import StoreInfo
|
||||
from aiohasupervisor.models.mounts import (
|
||||
CIFSMountResponse,
|
||||
MountsInfo,
|
||||
NFSMountResponse,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
|
||||
@@ -41,9 +46,11 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DATA_KEY_SUPERVISOR_ISSUES,
|
||||
DATA_MOUNTS_INFO,
|
||||
DATA_NETWORK_INFO,
|
||||
DATA_OS_INFO,
|
||||
DATA_STORE,
|
||||
@@ -174,6 +181,16 @@ def get_core_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
return hass.data.get(DATA_CORE_INFO)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_mounts_info(hass: HomeAssistant) -> MountsInfo | None:
|
||||
"""Return Home Assistant mounts information from Supervisor.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
return hass.data.get(DATA_MOUNTS_INFO)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_issues_info(hass: HomeAssistant) -> SupervisorIssues | None:
|
||||
@@ -203,6 +220,25 @@ def async_register_addons_in_dev_reg(
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_mounts_in_dev_reg(
|
||||
entry_id: str,
|
||||
dev_reg: dr.DeviceRegistry,
|
||||
mounts: list[CIFSMountResponse | NFSMountResponse],
|
||||
) -> None:
|
||||
"""Register mounts in the device registry."""
|
||||
for mount in mounts:
|
||||
params = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")},
|
||||
manufacturer="Home Assistant",
|
||||
model=SupervisorEntityModel.MOUNT,
|
||||
model_id=f"{mount.usage}/{mount.type}",
|
||||
name=mount.name,
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_os_in_dev_reg(
|
||||
entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
|
||||
@@ -272,12 +308,12 @@ def async_register_supervisor_in_dev_reg(
|
||||
|
||||
|
||||
@callback
|
||||
def async_remove_addons_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, addons: set[str]
|
||||
def async_remove_devices_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, devices: set[str]
|
||||
) -> None:
|
||||
"""Remove addons from the device registry."""
|
||||
for addon_slug in addons:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, addon_slug)}):
|
||||
"""Remove devices from the device registry."""
|
||||
for device in devices:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, device)}):
|
||||
dev_reg.async_remove_device(dev.id)
|
||||
|
||||
|
||||
@@ -362,12 +398,19 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
**get_supervisor_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
|
||||
new_data[DATA_KEY_MOUNTS] = {
|
||||
mount.name: mount
|
||||
for mount in getattr(get_mounts_info(self.hass), "mounts", [])
|
||||
}
|
||||
|
||||
# If this is the initial refresh, register all addons and return the dict
|
||||
if is_first_update:
|
||||
async_register_addons_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
async_register_mounts_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
|
||||
)
|
||||
async_register_core_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
|
||||
)
|
||||
@@ -389,7 +432,20 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
if device.model == SupervisorEntityModel.ADDON
|
||||
}
|
||||
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
|
||||
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
|
||||
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
|
||||
|
||||
# Remove mounts that no longer exists from device registry
|
||||
supervisor_mount_devices = {
|
||||
device.name
|
||||
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
|
||||
self.entry_id
|
||||
)
|
||||
if device.model == SupervisorEntityModel.MOUNT
|
||||
}
|
||||
if stale_mounts := supervisor_mount_devices - set(new_data[DATA_KEY_MOUNTS]):
|
||||
async_remove_devices_from_dev_reg(
|
||||
self.dev_reg, {f"mount_{stale_mount}" for stale_mount in stale_mounts}
|
||||
)
|
||||
|
||||
if not self.is_hass_os and (
|
||||
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
|
||||
@@ -397,11 +453,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
# Remove the OS device if it exists and the installation is not hassos
|
||||
self.dev_reg.async_remove_device(dev.id)
|
||||
|
||||
# If there are new add-ons, we should reload the config entry so we can
|
||||
# If there are new add-ons or mounts, we should reload the config entry so we can
|
||||
# create new devices and entities. We can return an empty dict because
|
||||
# coordinator will be recreated.
|
||||
if self.data and set(new_data[DATA_KEY_ADDONS]) - set(
|
||||
self.data[DATA_KEY_ADDONS]
|
||||
if self.data and (
|
||||
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
|
||||
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(self.entry_id)
|
||||
@@ -428,6 +485,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
DATA_CORE_INFO: hassio.get_core_info(),
|
||||
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
|
||||
DATA_OS_INFO: hassio.get_os_info(),
|
||||
DATA_MOUNTS_INFO: self.supervisor_client.mounts.info(),
|
||||
}
|
||||
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
|
||||
updates[DATA_CORE_STATS] = hassio.get_core_stats()
|
||||
|
||||
@@ -4,6 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@@ -15,6 +17,7 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DOMAIN,
|
||||
@@ -192,3 +195,34 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
)
|
||||
if CONTAINER_STATS in update_types:
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
|
||||
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
"""Base Entity for Mount."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HassioDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
mount: CIFSMountResponse | NFSMountResponse,
|
||||
) -> None:
|
||||
"""Initialize base entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = (
|
||||
f"home_assistant_mount_{mount.name}_{entity_description.key}"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")}
|
||||
)
|
||||
self._mount = mount
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self._mount.name in self.coordinator.data[DATA_KEY_MOUNTS]
|
||||
)
|
||||
|
||||
@@ -44,7 +44,6 @@ from .const import (
|
||||
EVENT_SUPPORTED_CHANGED,
|
||||
EXTRA_PLACEHOLDERS,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
@@ -87,7 +86,6 @@ ISSUE_KEYS_FOR_REPAIRS = {
|
||||
"issue_system_disk_lifetime",
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"mount": {
|
||||
"name": "Connected"
|
||||
},
|
||||
"state": {
|
||||
"name": "Running"
|
||||
}
|
||||
|
||||
@@ -75,6 +75,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
|
||||
context: ConfigFlowContext
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -112,7 +113,6 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Initialize the config flow."""
|
||||
|
||||
@@ -20,6 +20,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"rf_message_rssi": {
|
||||
"default": "mdi:signal"
|
||||
}
|
||||
},
|
||||
"water_heater": {
|
||||
"boiler": {
|
||||
"state": {
|
||||
|
||||
@@ -61,6 +61,16 @@ SENSOR_TYPES: tuple[IncomfortSensorEntityDescription, ...] = (
|
||||
value_key="tap_temp",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# A lower RSSI value is better
|
||||
# A typical RSSI value is 28 for connection just in range
|
||||
IncomfortSensorEntityDescription(
|
||||
key="rf_message_rssi",
|
||||
translation_key="rf_message_rssi",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_key="rf_message_rssi",
|
||||
extra_key="rfstatus_cntr",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -76,6 +76,9 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"rf_message_rssi": {
|
||||
"name": "RSSI"
|
||||
},
|
||||
"tap_temperature": {
|
||||
"name": "Tap temperature"
|
||||
}
|
||||
|
||||
@@ -54,14 +54,15 @@ from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
API_VERSION_2,
|
||||
BATCH_BUFFER_SIZE,
|
||||
BATCH_TIMEOUT,
|
||||
CATCHING_UP_MESSAGE,
|
||||
CLIENT_ERROR_V1,
|
||||
CLIENT_ERROR_V2,
|
||||
CODE_INVALID_INPUTS,
|
||||
COMPONENT_CONFIG_SCHEMA_BATCH,
|
||||
COMPONENT_CONFIG_SCHEMA_CONNECTION,
|
||||
CONF_API_VERSION,
|
||||
CONF_BATCH_BUFFER_SIZE,
|
||||
CONF_BATCH_TIMEOUT,
|
||||
CONF_BUCKET,
|
||||
CONF_COMPONENT_CONFIG,
|
||||
CONF_COMPONENT_CONFIG_DOMAIN,
|
||||
@@ -193,7 +194,12 @@ _INFLUX_BASE_SCHEMA = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
|
||||
)
|
||||
|
||||
INFLUX_SCHEMA = vol.All(
|
||||
_INFLUX_BASE_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION),
|
||||
_INFLUX_BASE_SCHEMA.extend(
|
||||
{
|
||||
**COMPONENT_CONFIG_SCHEMA_CONNECTION,
|
||||
**COMPONENT_CONFIG_SCHEMA_BATCH,
|
||||
}
|
||||
),
|
||||
validate_version_specific_config,
|
||||
create_influx_url,
|
||||
)
|
||||
@@ -496,7 +502,9 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
event_to_json = _generate_event_to_json(conf)
|
||||
max_tries = conf.get(CONF_RETRY_COUNT)
|
||||
instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries)
|
||||
instance = hass.data[DOMAIN] = InfluxThread(
|
||||
hass, influx, event_to_json, max_tries, conf
|
||||
)
|
||||
instance.start()
|
||||
|
||||
def shutdown(event):
|
||||
@@ -513,7 +521,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class InfluxThread(threading.Thread):
|
||||
"""A threaded event handler class."""
|
||||
|
||||
def __init__(self, hass, influx, event_to_json, max_tries):
|
||||
def __init__(self, hass, influx, event_to_json, max_tries, config):
|
||||
"""Initialize the listener."""
|
||||
threading.Thread.__init__(self, name=DOMAIN)
|
||||
self.queue: queue.SimpleQueue[threading.Event | tuple[float, Event] | None] = (
|
||||
@@ -524,6 +532,8 @@ class InfluxThread(threading.Thread):
|
||||
self.max_tries = max_tries
|
||||
self.write_errors = 0
|
||||
self.shutdown = False
|
||||
self._batch_timeout = config[CONF_BATCH_TIMEOUT]
|
||||
self.batch_buffer_size = config[CONF_BATCH_BUFFER_SIZE]
|
||||
hass.bus.listen(EVENT_STATE_CHANGED, self._event_listener)
|
||||
|
||||
@callback
|
||||
@@ -532,23 +542,31 @@ class InfluxThread(threading.Thread):
|
||||
item = (time.monotonic(), event)
|
||||
self.queue.put(item)
|
||||
|
||||
@staticmethod
|
||||
def batch_timeout():
|
||||
@property
|
||||
def batch_timeout(self):
|
||||
"""Return number of seconds to wait for more events."""
|
||||
return BATCH_TIMEOUT
|
||||
return self._batch_timeout
|
||||
|
||||
def get_events_json(self):
|
||||
"""Return a batch of events formatted for writing."""
|
||||
queue_seconds = QUEUE_BACKLOG_SECONDS + self.max_tries * RETRY_DELAY
|
||||
start_time = time.monotonic()
|
||||
batch_timeout = self.batch_timeout()
|
||||
|
||||
count = 0
|
||||
json = []
|
||||
|
||||
dropped = 0
|
||||
|
||||
with suppress(queue.Empty):
|
||||
while len(json) < BATCH_BUFFER_SIZE and not self.shutdown:
|
||||
timeout = None if count == 0 else self.batch_timeout()
|
||||
while len(json) < self.batch_buffer_size and not self.shutdown:
|
||||
if count > 0 and time.monotonic() - start_time >= batch_timeout:
|
||||
break
|
||||
|
||||
timeout = (
|
||||
None
|
||||
if count == 0
|
||||
else batch_timeout - (time.monotonic() - start_time)
|
||||
)
|
||||
item = self.queue.get(timeout=timeout)
|
||||
count += 1
|
||||
|
||||
|
||||
@@ -47,6 +47,9 @@ CONF_FUNCTION = "function"
|
||||
CONF_QUERY = "query"
|
||||
CONF_IMPORTS = "imports"
|
||||
|
||||
CONF_BATCH_BUFFER_SIZE = "batch_buffer_size"
|
||||
CONF_BATCH_TIMEOUT = "batch_timeout"
|
||||
|
||||
DEFAULT_DATABASE = "home_assistant"
|
||||
DEFAULT_HOST_V2 = "us-west-2-1.aws.cloud2.influxdata.com"
|
||||
DEFAULT_SSL_V2 = True
|
||||
@@ -60,6 +63,9 @@ DEFAULT_RANGE_STOP = "now()"
|
||||
DEFAULT_FUNCTION_FLUX = "|> limit(n: 1)"
|
||||
DEFAULT_MEASUREMENT_ATTR = "unit_of_measurement"
|
||||
|
||||
DEFAULT_BATCH_BUFFER_SIZE = 100
|
||||
DEFAULT_BATCH_TIMEOUT = 1
|
||||
|
||||
INFLUX_CONF_MEASUREMENT = "measurement"
|
||||
INFLUX_CONF_TAGS = "tags"
|
||||
INFLUX_CONF_TIME = "time"
|
||||
@@ -76,8 +82,6 @@ TIMEOUT = 10 # seconds
|
||||
RETRY_DELAY = 20
|
||||
QUEUE_BACKLOG_SECONDS = 30
|
||||
RETRY_INTERVAL = 60 # seconds
|
||||
BATCH_TIMEOUT = 1
|
||||
BATCH_BUFFER_SIZE = 100
|
||||
LANGUAGE_INFLUXQL = "influxQL"
|
||||
LANGUAGE_FLUX = "flux"
|
||||
TEST_QUERY_V1 = "SHOW DATABASES;"
|
||||
@@ -152,3 +156,10 @@ COMPONENT_CONFIG_SCHEMA_CONNECTION = {
|
||||
vol.Inclusive(CONF_ORG, "v2_authentication"): cv.string,
|
||||
vol.Optional(CONF_BUCKET, default=DEFAULT_BUCKET): cv.string,
|
||||
}
|
||||
|
||||
COMPONENT_CONFIG_SCHEMA_BATCH = {
|
||||
vol.Optional(
|
||||
CONF_BATCH_BUFFER_SIZE, default=DEFAULT_BATCH_BUFFER_SIZE
|
||||
): cv.positive_int,
|
||||
vol.Optional(CONF_BATCH_TIMEOUT, default=DEFAULT_BATCH_TIMEOUT): cv.positive_float,
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class KegtronConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -85,7 +85,7 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovered_adv:
|
||||
self._discovered_advs[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass):
|
||||
self._ble_device = discovery_info.device
|
||||
address = discovery_info.address
|
||||
|
||||
@@ -299,8 +299,8 @@ def _create_climate_ui(xknx: XKNX, conf: ConfigExtractor, name: str) -> XknxClim
|
||||
group_address_active_state=conf.get_state_and_passive(CONF_GA_ACTIVE),
|
||||
group_address_command_value_state=conf.get_state_and_passive(CONF_GA_VALVE),
|
||||
sync_state=sync_state,
|
||||
min_temp=conf.get(ClimateConf.MIN_TEMP),
|
||||
max_temp=conf.get(ClimateConf.MAX_TEMP),
|
||||
min_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MIN_TEMP),
|
||||
max_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MAX_TEMP),
|
||||
mode=climate_mode,
|
||||
group_address_fan_speed=conf.get_write(CONF_GA_FAN_SPEED),
|
||||
group_address_fan_speed_state=conf.get_state_and_passive(CONF_GA_FAN_SPEED),
|
||||
@@ -486,7 +486,7 @@ class _KnxClimate(ClimateEntity, _KnxEntityBase):
|
||||
ha_controller_modes.append(self._last_hvac_mode)
|
||||
ha_controller_modes.append(HVACMode.OFF)
|
||||
|
||||
hvac_modes = list(set(filter(None, ha_controller_modes)))
|
||||
hvac_modes = sorted(set(filter(None, ha_controller_modes)))
|
||||
return (
|
||||
hvac_modes
|
||||
if hvac_modes
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"requirements": [
|
||||
"xknx==3.10.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2025.10.26.81530"
|
||||
"knx-frontend==2025.10.31.195356"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ class KulerskyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -79,7 +79,7 @@ class Ld2410BleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -35,7 +35,7 @@ class LeaoneConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -85,7 +85,7 @@ class LedBleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -527,4 +527,57 @@ DISCOVERY_SCHEMAS = [
|
||||
vendor_id=(4447,),
|
||||
product_id=(8194,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SELECT,
|
||||
entity_description=MatterSelectEntityDescription(
|
||||
key="AqaraOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="sensitivity_level",
|
||||
options=["low", "standard", "high"],
|
||||
device_to_ha={
|
||||
0: "low",
|
||||
1: "standard",
|
||||
2: "high",
|
||||
}.get,
|
||||
ha_to_device={
|
||||
"low": 0,
|
||||
"standard": 1,
|
||||
"high": 2,
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterAttributeSelectEntity,
|
||||
required_attributes=(
|
||||
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
|
||||
),
|
||||
vendor_id=(4447,),
|
||||
product_id=(
|
||||
8197,
|
||||
8195,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SELECT,
|
||||
entity_description=MatterSelectEntityDescription(
|
||||
key="HeimanOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="sensitivity_level",
|
||||
options=["low", "standard", "high"],
|
||||
device_to_ha={
|
||||
0: "low",
|
||||
1: "standard",
|
||||
2: "high",
|
||||
}.get,
|
||||
ha_to_device={
|
||||
"low": 0,
|
||||
"standard": 1,
|
||||
"high": 2,
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterAttributeSelectEntity,
|
||||
required_attributes=(
|
||||
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
|
||||
),
|
||||
vendor_id=(4619,),
|
||||
product_id=(4097,),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -93,7 +93,7 @@ class InspectorBLEConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._discovery_info = self._discovered_devices[address]
|
||||
return await self.async_step_check_connection()
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -49,6 +49,15 @@ ATA_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
|
||||
value_fn=lambda x: x.device.total_energy_consumed,
|
||||
enabled=lambda x: x.device.has_energy_consumed_meter,
|
||||
),
|
||||
MelcloudSensorEntityDescription(
|
||||
key="outside_temperature",
|
||||
translation_key="outside_temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda x: x.device.outdoor_temperature,
|
||||
enabled=lambda x: x.device.has_outdoor_temperature,
|
||||
),
|
||||
)
|
||||
ATW_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
|
||||
MelcloudSensorEntityDescription(
|
||||
|
||||
@@ -75,7 +75,7 @@ class MelnorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
return self._create_entry(address)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(
|
||||
self.hass, connectable=True
|
||||
):
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymiele"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pymiele==0.5.6"],
|
||||
"requirements": ["pymiele==0.6.0"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_mieleathome._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class MoatConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -66,6 +66,8 @@ from .const import (
|
||||
CONF_BYTESIZE,
|
||||
CONF_CLIMATES,
|
||||
CONF_COLOR_TEMP_REGISTER,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_DATA_TYPE,
|
||||
CONF_DEVICE_ADDRESS,
|
||||
CONF_FAN_MODE_AUTO,
|
||||
@@ -137,6 +139,8 @@ from .const import (
|
||||
CONF_SWING_MODE_SWING_VERT,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS,
|
||||
CONF_VERIFY,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
@@ -159,8 +163,10 @@ from .modbus import DATA_MODBUS_HUBS, ModbusHub, async_modbus_setup
|
||||
from .validators import (
|
||||
duplicate_fan_mode_validator,
|
||||
duplicate_swing_mode_validator,
|
||||
ensure_and_check_conflicting_scales_and_offsets,
|
||||
hvac_fixedsize_reglist_validator,
|
||||
nan_validator,
|
||||
not_zero_value,
|
||||
register_int_list_validator,
|
||||
struct_validator,
|
||||
)
|
||||
@@ -210,8 +216,10 @@ BASE_STRUCT_SCHEMA = BASE_COMPONENT_SCHEMA.extend(
|
||||
]
|
||||
),
|
||||
vol.Optional(CONF_STRUCTURE): cv.string,
|
||||
vol.Optional(CONF_SCALE, default=1): vol.Coerce(float),
|
||||
vol.Optional(CONF_OFFSET, default=0): vol.Coerce(float),
|
||||
vol.Optional(CONF_SCALE): vol.All(
|
||||
vol.Coerce(float), lambda v: not_zero_value(v, "Scale cannot be zero.")
|
||||
),
|
||||
vol.Optional(CONF_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(CONF_PRECISION): cv.positive_int,
|
||||
vol.Optional(
|
||||
CONF_SWAP,
|
||||
@@ -273,6 +281,18 @@ CLIMATE_SCHEMA = vol.All(
|
||||
vol.Optional(CONF_TEMPERATURE_UNIT, default=DEFAULT_TEMP_UNIT): cv.string,
|
||||
vol.Exclusive(CONF_HVAC_ONOFF_COIL, "hvac_onoff_type"): cv.positive_int,
|
||||
vol.Exclusive(CONF_HVAC_ONOFF_REGISTER, "hvac_onoff_type"): cv.positive_int,
|
||||
vol.Optional(CONF_CURRENT_TEMP_SCALE): vol.All(
|
||||
vol.Coerce(float),
|
||||
lambda v: not_zero_value(
|
||||
v, "Current temperature scale cannot be zero."
|
||||
),
|
||||
),
|
||||
vol.Optional(CONF_TARGET_TEMP_SCALE): vol.All(
|
||||
vol.Coerce(float),
|
||||
lambda v: not_zero_value(v, "Target temperature scale cannot be zero."),
|
||||
),
|
||||
vol.Optional(CONF_CURRENT_TEMP_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(CONF_TARGET_TEMP_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(
|
||||
CONF_HVAC_ON_VALUE, default=DEFAULT_HVAC_ON_VALUE
|
||||
): cv.positive_int,
|
||||
@@ -385,6 +405,7 @@ CLIMATE_SCHEMA = vol.All(
|
||||
),
|
||||
},
|
||||
),
|
||||
ensure_and_check_conflicting_scales_and_offsets,
|
||||
)
|
||||
|
||||
COVERS_SCHEMA = BASE_COMPONENT_SCHEMA.extend(
|
||||
|
||||
@@ -50,6 +50,8 @@ from .const import (
|
||||
CALL_TYPE_WRITE_REGISTER,
|
||||
CALL_TYPE_WRITE_REGISTERS,
|
||||
CONF_CLIMATES,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_FAN_MODE_AUTO,
|
||||
CONF_FAN_MODE_DIFFUSE,
|
||||
CONF_FAN_MODE_FOCUS,
|
||||
@@ -97,8 +99,12 @@ from .const import (
|
||||
CONF_SWING_MODE_SWING_VERT,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS,
|
||||
CONF_WRITE_REGISTERS,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
DataType,
|
||||
)
|
||||
from .entity import ModbusStructEntity
|
||||
@@ -166,6 +172,10 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._attr_min_temp = config[CONF_MIN_TEMP]
|
||||
self._attr_max_temp = config[CONF_MAX_TEMP]
|
||||
self._attr_target_temperature_step = config[CONF_STEP]
|
||||
self._current_temp_scale = config[CONF_CURRENT_TEMP_SCALE]
|
||||
self._current_temp_offset = config[CONF_CURRENT_TEMP_OFFSET]
|
||||
self._target_temp_scale = config[CONF_TARGET_TEMP_SCALE]
|
||||
self._target_temp_offset = config[CONF_TARGET_TEMP_OFFSET]
|
||||
|
||||
if CONF_HVAC_MODE_REGISTER in config:
|
||||
mode_config = config[CONF_HVAC_MODE_REGISTER]
|
||||
@@ -413,8 +423,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
target_temperature = (
|
||||
float(kwargs[ATTR_TEMPERATURE]) - self._offset
|
||||
) / self._scale
|
||||
float(kwargs[ATTR_TEMPERATURE]) - self._target_temp_offset
|
||||
) / self._target_temp_scale
|
||||
if self._data_type in (
|
||||
DataType.INT16,
|
||||
DataType.INT32,
|
||||
@@ -472,15 +482,25 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._target_temperature_register[
|
||||
HVACMODE_TO_TARG_TEMP_REG_INDEX_ARRAY[self._attr_hvac_mode]
|
||||
],
|
||||
self._target_temp_scale,
|
||||
self._target_temp_offset,
|
||||
)
|
||||
|
||||
self._attr_current_temperature = await self._async_read_register(
|
||||
self._input_type, self._address
|
||||
self._input_type,
|
||||
self._address,
|
||||
self._current_temp_scale,
|
||||
self._current_temp_offset,
|
||||
)
|
||||
|
||||
# Read the HVAC mode register if defined
|
||||
if self._hvac_mode_register is not None:
|
||||
hvac_mode = await self._async_read_register(
|
||||
CALL_TYPE_REGISTER_HOLDING, self._hvac_mode_register, raw=True
|
||||
CALL_TYPE_REGISTER_HOLDING,
|
||||
self._hvac_mode_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
# Translate the value received
|
||||
@@ -499,7 +519,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
# Read the HVAC action register if defined
|
||||
if self._hvac_action_register is not None:
|
||||
hvac_action = await self._async_read_register(
|
||||
self._hvac_action_type, self._hvac_action_register, raw=True
|
||||
self._hvac_action_type,
|
||||
self._hvac_action_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
# Translate the value received
|
||||
@@ -517,6 +541,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._fan_mode_register
|
||||
if isinstance(self._fan_mode_register, int)
|
||||
else self._fan_mode_register[0],
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
@@ -533,6 +559,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._swing_mode_register
|
||||
if isinstance(self._swing_mode_register, int)
|
||||
else self._swing_mode_register[0],
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
@@ -551,7 +579,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
# in the mode register.
|
||||
if self._hvac_onoff_register is not None:
|
||||
onoff = await self._async_read_register(
|
||||
CALL_TYPE_REGISTER_HOLDING, self._hvac_onoff_register, raw=True
|
||||
CALL_TYPE_REGISTER_HOLDING,
|
||||
self._hvac_onoff_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
if onoff == self._hvac_off_value:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
@@ -562,7 +594,12 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
|
||||
async def _async_read_register(
|
||||
self, register_type: str, register: int, raw: bool | None = False
|
||||
self,
|
||||
register_type: str,
|
||||
register: int,
|
||||
scale: float,
|
||||
offset: float,
|
||||
raw: bool | None = False,
|
||||
) -> float | None:
|
||||
"""Read register using the Modbus hub slave."""
|
||||
result = await self._hub.async_pb_call(
|
||||
@@ -579,7 +616,7 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
return int(result.registers[0])
|
||||
|
||||
# The regular handling of the value
|
||||
self._value = self.unpack_structure_result(result.registers)
|
||||
self._value = self.unpack_structure_result(result.registers, scale, offset)
|
||||
if not self._value:
|
||||
self._attr_available = False
|
||||
return None
|
||||
|
||||
@@ -19,6 +19,8 @@ CONF_BYTESIZE = "bytesize"
|
||||
CONF_CLIMATES = "climates"
|
||||
CONF_BRIGHTNESS_REGISTER = "brightness_address"
|
||||
CONF_COLOR_TEMP_REGISTER = "color_temp_address"
|
||||
CONF_CURRENT_TEMP_OFFSET = "current_temp_offset"
|
||||
CONF_CURRENT_TEMP_SCALE = "current_temp_scale"
|
||||
CONF_DATA_TYPE = "data_type"
|
||||
CONF_DEVICE_ADDRESS = "device_address"
|
||||
CONF_FANS = "fans"
|
||||
@@ -48,6 +50,8 @@ CONF_SWAP_BYTE = "byte"
|
||||
CONF_SWAP_WORD = "word"
|
||||
CONF_SWAP_WORD_BYTE = "word_byte"
|
||||
CONF_TARGET_TEMP = "target_temp_register"
|
||||
CONF_TARGET_TEMP_OFFSET = "target_temp_offset"
|
||||
CONF_TARGET_TEMP_SCALE = "target_temp_scale"
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS = "target_temp_write_registers"
|
||||
CONF_FAN_MODE_REGISTER = "fan_mode_register"
|
||||
CONF_FAN_MODE_ON = "state_fan_on"
|
||||
@@ -181,4 +185,7 @@ LIGHT_MODBUS_SCALE_MIN = 0
|
||||
LIGHT_MODBUS_SCALE_MAX = 100
|
||||
LIGHT_MODBUS_INVALID_VALUE = 0xFFFF
|
||||
|
||||
DEFAULT_SCALE = 1.0
|
||||
DEFAULT_OFFSET = 0
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
@@ -17,7 +17,6 @@ from homeassistant.const import (
|
||||
CONF_DELAY,
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_SLAVE,
|
||||
CONF_STRUCTURE,
|
||||
@@ -50,7 +49,6 @@ from .const import (
|
||||
CONF_MIN_VALUE,
|
||||
CONF_NAN_VALUE,
|
||||
CONF_PRECISION,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_STATE_OFF,
|
||||
CONF_STATE_ON,
|
||||
@@ -62,6 +60,8 @@ from .const import (
|
||||
CONF_VIRTUAL_COUNT,
|
||||
CONF_WRITE_TYPE,
|
||||
CONF_ZERO_SUPPRESS,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
SIGNAL_STOP_ENTITY,
|
||||
DataType,
|
||||
)
|
||||
@@ -163,8 +163,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
self._swap = config[CONF_SWAP]
|
||||
self._data_type = config[CONF_DATA_TYPE]
|
||||
self._structure: str = config[CONF_STRUCTURE]
|
||||
self._scale = config[CONF_SCALE]
|
||||
self._offset = config[CONF_OFFSET]
|
||||
self._slave_count = config.get(CONF_SLAVE_COUNT) or config.get(
|
||||
CONF_VIRTUAL_COUNT, 0
|
||||
)
|
||||
@@ -181,8 +179,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
self._precision = config.get(CONF_PRECISION, 2)
|
||||
else:
|
||||
self._precision = config.get(CONF_PRECISION, 0)
|
||||
if self._precision > 0 or self._scale != int(self._scale):
|
||||
self._value_is_int = False
|
||||
|
||||
def _swap_registers(self, registers: list[int], slave_count: int) -> list[int]:
|
||||
"""Do swap as needed."""
|
||||
@@ -206,7 +202,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
registers.reverse()
|
||||
return registers
|
||||
|
||||
def __process_raw_value(self, entry: float | str | bytes) -> str | None:
|
||||
def __process_raw_value(
|
||||
self,
|
||||
entry: float | bytes,
|
||||
scale: float = DEFAULT_SCALE,
|
||||
offset: float = DEFAULT_OFFSET,
|
||||
) -> str | None:
|
||||
"""Process value from sensor with NaN handling, scaling, offset, min/max etc."""
|
||||
if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
|
||||
return None
|
||||
@@ -215,7 +216,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
if entry != entry: # noqa: PLR0124
|
||||
# NaN float detection replace with None
|
||||
return None
|
||||
val: float | int = self._scale * entry + self._offset
|
||||
val: float | int = scale * entry + offset
|
||||
if self._min_value is not None and val < self._min_value:
|
||||
val = self._min_value
|
||||
if self._max_value is not None and val > self._max_value:
|
||||
@@ -226,7 +227,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
return str(round(val))
|
||||
return f"{float(val):.{self._precision}f}"
|
||||
|
||||
def unpack_structure_result(self, registers: list[int]) -> str | None:
|
||||
def unpack_structure_result(
|
||||
self,
|
||||
registers: list[int],
|
||||
scale: float = DEFAULT_SCALE,
|
||||
offset: float = DEFAULT_OFFSET,
|
||||
) -> str | None:
|
||||
"""Convert registers to proper result."""
|
||||
|
||||
if self._swap:
|
||||
@@ -250,7 +256,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
# Apply scale, precision, limits to floats and ints
|
||||
v_result = []
|
||||
for entry in val:
|
||||
v_temp = self.__process_raw_value(entry)
|
||||
v_temp = self.__process_raw_value(entry, scale, offset)
|
||||
if self._data_type != DataType.CUSTOM:
|
||||
v_result.append(str(v_temp))
|
||||
else:
|
||||
@@ -258,7 +264,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
return ",".join(map(str, v_result))
|
||||
|
||||
# Apply scale, precision, limits to floats and ints
|
||||
return self.__process_raw_value(val[0])
|
||||
return self.__process_raw_value(val[0], scale, offset)
|
||||
|
||||
|
||||
class ModbusToggleEntity(ModbusBaseEntity, ToggleEntity, RestoreEntity):
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_SENSORS,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
@@ -25,7 +26,14 @@ from homeassistant.helpers.update_coordinator import (
|
||||
)
|
||||
|
||||
from . import get_hub
|
||||
from .const import _LOGGER, CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
)
|
||||
from .entity import ModbusStructEntity
|
||||
from .modbus import ModbusHub
|
||||
|
||||
@@ -73,9 +81,13 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
|
||||
self._coordinator: DataUpdateCoordinator[list[float | None] | None] | None = (
|
||||
None
|
||||
)
|
||||
self._scale = entry.get(CONF_SCALE, DEFAULT_SCALE)
|
||||
self._offset = entry.get(CONF_OFFSET, DEFAULT_OFFSET)
|
||||
self._attr_native_unit_of_measurement = entry.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_state_class = entry.get(CONF_STATE_CLASS)
|
||||
self._attr_device_class = entry.get(CONF_DEVICE_CLASS)
|
||||
if self._precision > 0 or self._scale != int(self._scale):
|
||||
self._value_is_int = False
|
||||
|
||||
async def async_setup_slaves(
|
||||
self, hass: HomeAssistant, slave_count: int, entry: dict[str, Any]
|
||||
@@ -117,7 +129,9 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
|
||||
self.async_write_ha_state()
|
||||
return
|
||||
self._attr_available = True
|
||||
result = self.unpack_structure_result(raw_result.registers)
|
||||
result = self.unpack_structure_result(
|
||||
raw_result.registers, self._scale, self._offset
|
||||
)
|
||||
if self._coordinator:
|
||||
result_array: list[float | None] = []
|
||||
if result:
|
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.const import (
|
||||
CONF_COUNT,
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_PORT,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_STRUCTURE,
|
||||
@@ -25,16 +26,23 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import (
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_DATA_TYPE,
|
||||
CONF_FAN_MODE_VALUES,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_SWAP,
|
||||
CONF_SWAP_BYTE,
|
||||
CONF_SWAP_WORD,
|
||||
CONF_SWAP_WORD_BYTE,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
DEFAULT_HUB,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
@@ -243,6 +251,46 @@ def duplicate_fan_mode_validator(config: dict[str, Any]) -> dict:
|
||||
return config
|
||||
|
||||
|
||||
def not_zero_value(val: float, errMsg: str) -> float:
|
||||
"""Check value is not zero."""
|
||||
if val == 0:
|
||||
raise vol.Invalid(errMsg)
|
||||
return val
|
||||
|
||||
|
||||
def ensure_and_check_conflicting_scales_and_offsets(config: dict[str, Any]) -> dict:
|
||||
"""Check for conflicts in scale/offset and ensure target/current temp scale/offset is set."""
|
||||
config_keys = [
|
||||
(CONF_SCALE, CONF_TARGET_TEMP_SCALE, CONF_CURRENT_TEMP_SCALE, DEFAULT_SCALE),
|
||||
(
|
||||
CONF_OFFSET,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
DEFAULT_OFFSET,
|
||||
),
|
||||
]
|
||||
|
||||
for generic_key, target_key, current_key, default_value in config_keys:
|
||||
if generic_key in config and (target_key in config or current_key in config):
|
||||
raise vol.Invalid(
|
||||
f"Cannot use both '{generic_key}' and temperature-specific parameters "
|
||||
f"('{target_key}' or '{current_key}') in the same configuration. "
|
||||
f"Either the '{generic_key}' parameter (which applies to both temperatures) "
|
||||
"or the new temperature-specific parameters, but not both."
|
||||
)
|
||||
if generic_key in config:
|
||||
value = config.pop(generic_key)
|
||||
config[target_key] = value
|
||||
config[current_key] = value
|
||||
|
||||
if target_key not in config:
|
||||
config[target_key] = default_value
|
||||
if current_key not in config:
|
||||
config[current_key] = default_value
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def duplicate_swing_mode_validator(config: dict[str, Any]) -> dict:
|
||||
"""Control modbus climate swing mode values for duplicates."""
|
||||
swing_modes: set[int] = set()
|
||||
|
||||
@@ -238,12 +238,14 @@ async def _client_listen(
|
||||
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: MusicAssistantConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
if unload_ok:
|
||||
mass_entry_data: MusicAssistantEntryData = entry.runtime_data
|
||||
mass_entry_data = entry.runtime_data
|
||||
mass_entry_data.listen_task.cancel()
|
||||
await mass_entry_data.mass.disconnect()
|
||||
|
||||
@@ -251,7 +253,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
|
||||
async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
|
||||
hass: HomeAssistant,
|
||||
config_entry: MusicAssistantConfigEntry,
|
||||
device_entry: dr.DeviceEntry,
|
||||
) -> bool:
|
||||
"""Remove a config entry from a device."""
|
||||
player_id = next(
|
||||
|
||||
@@ -115,6 +115,13 @@ QUEUE_OPTION_MAP = {
|
||||
MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE,
|
||||
}
|
||||
|
||||
REPEAT_MODE_MAPPING_TO_HA = {
|
||||
MassRepeatMode.OFF: RepeatMode.OFF,
|
||||
MassRepeatMode.ONE: RepeatMode.ONE,
|
||||
MassRepeatMode.ALL: RepeatMode.ALL,
|
||||
# UNKNOWN is intentionally not mapped - will return None
|
||||
}
|
||||
|
||||
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
|
||||
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
|
||||
SERVICE_TRANSFER_QUEUE = "transfer_queue"
|
||||
@@ -657,7 +664,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
|
||||
# player has an MA queue active (either its own queue or some group queue)
|
||||
self._attr_app_id = DOMAIN
|
||||
self._attr_shuffle = queue.shuffle_enabled
|
||||
self._attr_repeat = queue.repeat_mode.value
|
||||
self._attr_repeat = REPEAT_MODE_MAPPING_TO_HA.get(queue.repeat_mode)
|
||||
if not (cur_item := queue.current_item):
|
||||
# queue is empty
|
||||
return
|
||||
|
||||
@@ -19,7 +19,11 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
|
||||
from .coordinator import NASwebCoordinator
|
||||
from .nasweb_data import NASwebData
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
NASWEB_CONFIG_URL = "https://{host}/page"
|
||||
|
||||
|
||||
154
homeassistant/components/nasweb/alarm_control_panel.py
Normal file
154
homeassistant/components/nasweb/alarm_control_panel.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Platform for NASweb alarms."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from webio_api import Zone as NASwebZone
|
||||
from webio_api.const import STATE_ZONE_ALARM, STATE_ZONE_ARMED, STATE_ZONE_DISARMED
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
DOMAIN as DOMAIN_ALARM_CONTROL_PANEL,
|
||||
AlarmControlPanelEntity,
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
BaseCoordinatorEntity,
|
||||
BaseDataUpdateCoordinatorProtocol,
|
||||
)
|
||||
|
||||
from . import NASwebConfigEntry
|
||||
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
ALARM_CONTROL_PANEL_TRANSLATION_KEY = "zone"
|
||||
|
||||
NASWEB_STATE_TO_HA_STATE = {
|
||||
STATE_ZONE_ALARM: AlarmControlPanelState.TRIGGERED,
|
||||
STATE_ZONE_ARMED: AlarmControlPanelState.ARMED_AWAY,
|
||||
STATE_ZONE_DISARMED: AlarmControlPanelState.DISARMED,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config: NASwebConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up alarm control panel platform."""
    coordinator = config.runtime_data
    current_zones: set[int] = set()

    @callback
    def _check_entities() -> None:
        # Index the zones currently reported by the controller.
        reported: dict[int, NASwebZone] = {
            zone.index: zone for zone in coordinator.webio_api.zones
        }
        new_indices = set(reported) - current_zones
        stale_indices = current_zones - set(reported)

        fresh_entities: list[ZoneEntity] = []
        for idx in new_indices:
            zone = reported[idx]
            if not isinstance(zone, NASwebZone):
                _LOGGER.error("Cannot create ZoneEntity without NASwebZone")
                continue
            fresh_entities.append(ZoneEntity(coordinator, zone))
            current_zones.add(idx)
        async_add_entities(fresh_entities)

        # Drop registry entries for zones the controller no longer reports.
        registry = er.async_get(hass)
        for idx in stale_indices:
            unique_id = f"{DOMAIN}.{config.unique_id}.zone.{idx}"
            entity_id = registry.async_get_entity_id(
                DOMAIN_ALARM_CONTROL_PANEL, DOMAIN, unique_id
            )
            if entity_id is None:
                _LOGGER.warning("Failed to remove old zone: no entity_id")
                continue
            registry.async_remove(entity_id)
            current_zones.remove(idx)

    # Re-evaluate the zone list on every coordinator push, and once now.
    coordinator.async_add_listener(_check_entities)
    _check_entities()
|
||||
|
||||
|
||||
class ZoneEntity(AlarmControlPanelEntity, BaseCoordinatorEntity):
    """Entity representing NASweb zone."""

    # Entity name is composed from the device name plus the translation key.
    _attr_has_entity_name = True
    # State is pushed via the coordinator; no polling needed.
    _attr_should_poll = False
    _attr_translation_key = ALARM_CONTROL_PANEL_TRANSLATION_KEY

    def __init__(
        self, coordinator: BaseDataUpdateCoordinatorProtocol, nasweb_zone: NASwebZone
    ) -> None:
        """Initialize zone entity."""
        super().__init__(coordinator)
        self._zone = nasweb_zone
        self._attr_name = nasweb_zone.name
        # Width-2 space-padded index substituted into the "Zone {index}" name.
        self._attr_translation_placeholders = {"index": f"{nasweb_zone.index:2d}"}
        self._attr_unique_id = (
            f"{DOMAIN}.{self._zone.webio_serial}.zone.{self._zone.index}"
        )
        # Attach to the device identified by the controller serial number.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._zone.webio_serial)},
        )

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        # Populate state immediately instead of waiting for the next push.
        self._handle_coordinator_update()

    def _set_attr_available(
        self, entity_last_update: float, available: bool | None
    ) -> None:
        # Unavailable when the coordinator never delivered data or the zone's
        # own status is older than the allowed interval; otherwise use the
        # zone-reported availability, treating None as unavailable.
        if (
            self.coordinator.last_update is None
            or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
        ):
            self._attr_available = False
        else:
            self._attr_available = available if available is not None else False

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # NOTE(review): raises KeyError for a zone state outside the mapping —
        # presumably the controller only reports the three known states; confirm.
        self._attr_alarm_state = NASWEB_STATE_TO_HA_STATE[self._zone.state]
        # pass_type 0 -> text code, 1 -> numeric code, anything else -> none.
        if self._zone.pass_type == 0:
            self._attr_code_format = CodeFormat.TEXT
        elif self._zone.pass_type == 1:
            self._attr_code_format = CodeFormat.NUMBER
        else:
            self._attr_code_format = None
        # Arming requires a code exactly when a code format is configured.
        self._attr_code_arm_required = self._attr_code_format is not None

        self._set_attr_available(self._zone.last_update, self._zone.available)
        self.async_write_ha_state()

    async def async_update(self) -> None:
        """Update the entity.

        Only used by the generic entity update service.
        Scheduling updates is not necessary, the coordinator takes care of updates via push notifications.
        """

    @property
    def supported_features(self) -> AlarmControlPanelEntityFeature:
        """Return the list of supported features."""
        return AlarmControlPanelEntityFeature.ARM_AWAY

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Arm away ZoneEntity."""
        await self._zone.arm(code)

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Disarm ZoneEntity."""
        await self._zone.disarm(code)
|
||||
@@ -23,6 +23,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
KEY_INPUTS = "inputs"
|
||||
KEY_OUTPUTS = "outputs"
|
||||
KEY_ZONES = "zones"
|
||||
|
||||
|
||||
class NotificationCoordinator:
|
||||
@@ -103,6 +104,7 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
KEY_ZONES: self.webio_api.zones,
|
||||
}
|
||||
self.async_set_updated_data(data)
|
||||
|
||||
@@ -197,5 +199,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
KEY_ZONES: self.webio_api.zones,
|
||||
}
|
||||
self.async_set_updated_data(new_data)
|
||||
|
||||
@@ -24,6 +24,11 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"alarm_control_panel": {
|
||||
"zone": {
|
||||
"name": "Zone {index}"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"sensor_input": {
|
||||
"name": "Input {index}",
|
||||
|
||||
76
homeassistant/components/neato/__init__.py
Normal file
76
homeassistant/components/neato/__init__.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Support for Neato botvac connected vacuum cleaners."""
|
||||
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from pybotvac import Account
|
||||
from pybotvac.exceptions import NeatoException
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from . import api
|
||||
from .const import NEATO_DOMAIN, NEATO_LOGIN
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
Platform.CAMERA,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up config entry."""
    hass.data.setdefault(NEATO_DOMAIN, {})
    # Entries created before the OAuth migration lack a token: force reauth.
    if CONF_TOKEN not in entry.data:
        raise ConfigEntryAuthFailed

    implementation = (
        await config_entry_oauth2_flow.async_get_config_entry_implementation(
            hass, entry
        )
    )

    oauth_session = config_entry_oauth2_flow.OAuth2Session(
        hass, entry, implementation
    )
    try:
        await oauth_session.async_ensure_token_valid()
    except aiohttp.ClientResponseError as err:
        _LOGGER.debug("API error: %s (%s)", err.code, err.message)
        # 401/403 means the token itself is bad -> reauth; anything else
        # is treated as transient and retried.
        if err.code in (401, 403):
            raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from err
        raise ConfigEntryNotReady from err

    neato_session = api.ConfigEntryAuth(hass, entry, implementation)
    hass.data[NEATO_DOMAIN][entry.entry_id] = neato_session
    hub = NeatoHub(hass, Account(neato_session))

    await hub.async_update_entry_unique_id(entry)

    try:
        await hass.async_add_executor_job(hub.update_robots)
    except NeatoException as err:
        _LOGGER.debug("Failed to connect to Neato API")
        raise ConfigEntryNotReady from err

    hass.data[NEATO_LOGIN] = hub

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload config entry."""
    unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unloaded:
        # Drop the per-entry auth session stored during setup.
        hass.data[NEATO_DOMAIN].pop(entry.entry_id)

    return unloaded
|
||||
58
homeassistant/components/neato/api.py
Normal file
58
homeassistant/components/neato/api.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""API for Neato Botvac bound to Home Assistant OAuth."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import run_coroutine_threadsafe
|
||||
from typing import Any
|
||||
|
||||
import pybotvac
|
||||
|
||||
from homeassistant import config_entries, core
|
||||
from homeassistant.components.application_credentials import AuthImplementation
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
|
||||
class ConfigEntryAuth(pybotvac.OAuthSession):  # type: ignore[misc]
    """Provide Neato Botvac authentication tied to an OAuth2 based config entry."""

    def __init__(
        self,
        hass: core.HomeAssistant,
        config_entry: config_entries.ConfigEntry,
        implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
    ) -> None:
        """Initialize Neato Botvac Auth."""
        self.hass = hass
        self.session = config_entry_oauth2_flow.OAuth2Session(
            hass, config_entry, implementation
        )
        # Hand the current HA-managed token to pybotvac's session.
        super().__init__(self.session.token, vendor=pybotvac.Neato())

    def refresh_tokens(self) -> str:
        """Refresh and return new Neato Botvac tokens."""
        # Called by pybotvac from a worker thread, so the async refresh is
        # scheduled on the HA event loop and awaited synchronously here.
        run_coroutine_threadsafe(
            self.session.async_ensure_token_valid(), self.hass.loop
        ).result()

        return self.session.token["access_token"]  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
class NeatoImplementation(AuthImplementation):
    """Neato implementation of LocalOAuth2Implementation.

    We need this class because we have to add client_secret
    and scope to the authorization request.
    """

    @property
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {"client_secret": self.client_secret}

    async def async_generate_authorize_url(self, flow_id: str) -> str:
        """Generate a url for the user to authorize.

        We must make sure that the plus signs are not encoded.
        """
        # Scope is appended manually: the base implementation would
        # URL-encode the '+' separators that the Neato API expects verbatim.
        url = await super().async_generate_authorize_url(flow_id)
        return f"{url}&scope=public_profile+control_robots+maps"
|
||||
28
homeassistant/components/neato/application_credentials.py
Normal file
28
homeassistant/components/neato/application_credentials.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Application credentials platform for neato."""
|
||||
|
||||
from pybotvac import Neato
|
||||
|
||||
from homeassistant.components.application_credentials import (
|
||||
AuthorizationServer,
|
||||
ClientCredential,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from . import api
|
||||
|
||||
|
||||
async def async_get_auth_implementation(
    hass: HomeAssistant, auth_domain: str, credential: ClientCredential
) -> config_entry_oauth2_flow.AbstractOAuth2Implementation:
    """Return auth implementation for a custom auth implementation."""
    # The OAuth endpoints are published by the pybotvac vendor object.
    vendor = Neato()
    auth_server = AuthorizationServer(
        authorize_url=vendor.auth_endpoint,
        token_url=vendor.token_endpoint,
    )
    return api.NeatoImplementation(hass, auth_domain, credential, auth_server)
|
||||
44
homeassistant/components/neato/button.py
Normal file
44
homeassistant/components/neato/button.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Support for Neato buttons."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pybotvac import Robot
|
||||
|
||||
from homeassistant.components.button import ButtonEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_ROBOTS
|
||||
from .entity import NeatoEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Neato button from config entry."""
    # One dismiss-alert button per known robot, updated before add.
    buttons = [
        NeatoDismissAlertButton(robot) for robot in hass.data[NEATO_ROBOTS]
    ]

    async_add_entities(buttons, True)
|
||||
|
||||
|
||||
class NeatoDismissAlertButton(NeatoEntity, ButtonEntity):
    """Representation of a dismiss_alert button entity."""

    _attr_translation_key = "dismiss_alert"
    _attr_entity_category = EntityCategory.CONFIG

    def __init__(
        self,
        robot: Robot,
    ) -> None:
        """Initialize a dismiss_alert Neato button entity."""
        super().__init__(robot)
        self._attr_unique_id = f"{robot.serial}_dismiss_alert"

    async def async_press(self) -> None:
        """Press the button."""
        # pybotvac call is blocking; run it in the executor.
        await self.hass.async_add_executor_job(self.robot.dismiss_current_alert)
|
||||
130
homeassistant/components/neato/camera.py
Normal file
130
homeassistant/components/neato/camera.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Support for loading picture from Neato."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pybotvac.exceptions import NeatoRobotException
|
||||
from pybotvac.robot import Robot
|
||||
from urllib3.response import HTTPResponse
|
||||
|
||||
from homeassistant.components.camera import Camera
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_LOGIN, NEATO_MAP_DATA, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES
|
||||
from .entity import NeatoEntity
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
|
||||
ATTR_GENERATED_AT = "generated_at"
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Neato camera with config entry."""
    neato: NeatoHub = hass.data[NEATO_LOGIN]
    mapdata: dict[str, Any] | None = hass.data.get(NEATO_MAP_DATA)

    # Only robots that advertise the "maps" trait get a cleaning-map camera.
    entities = []
    for robot in hass.data[NEATO_ROBOTS]:
        if "maps" not in robot.traits:
            continue
        entities.append(NeatoCleaningMap(neato, robot, mapdata))

    if not entities:
        return

    _LOGGER.debug("Adding robots for cleaning maps %s", entities)
    async_add_entities(entities, True)
|
||||
|
||||
|
||||
class NeatoCleaningMap(NeatoEntity, Camera):
    """Neato cleaning map for last clean."""

    _attr_translation_key = "cleaning_map"

    def __init__(
        self, neato: NeatoHub, robot: Robot, mapdata: dict[str, Any] | None
    ) -> None:
        """Initialize Neato cleaning map."""
        super().__init__(robot)
        # Camera has its own required initialization alongside NeatoEntity.
        Camera.__init__(self)
        self.neato = neato
        self._mapdata = mapdata
        self._available = neato is not None
        self._robot_serial: str = self.robot.serial
        self._attr_unique_id = self.robot.serial
        # Cached map metadata/image; _image_url doubles as a change detector.
        self._generated_at: str | None = None
        self._image_url: str | None = None
        self._image: bytes | None = None

    def camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return image response."""
        # Refresh on demand; width/height hints are not used.
        self.update()
        return self._image

    def update(self) -> None:
        """Check the contents of the map list."""

        _LOGGER.debug("Running camera update for '%s'", self.entity_id)
        try:
            self.neato.update_robots()
        except NeatoRobotException as ex:
            if self._available:  # Print only once when available
                _LOGGER.error(
                    "Neato camera connection error for '%s': %s", self.entity_id, ex
                )
            # Invalidate the cache so a stale image is not served.
            self._image = None
            self._image_url = None
            self._available = False
            return

        if self._mapdata:
            # NOTE(review): assumes mapdata always holds this serial with a
            # non-empty "maps" list — confirm against hub.update_robots.
            map_data: dict[str, Any] = self._mapdata[self._robot_serial]["maps"][0]
            # Skip the download when the URL has not changed since last fetch.
            if (image_url := map_data["url"]) == self._image_url:
                _LOGGER.debug(
                    "The map image_url for '%s' is the same as old", self.entity_id
                )
                return

            try:
                image: HTTPResponse = self.neato.download_map(image_url)
            except NeatoRobotException as ex:
                if self._available:  # Print only once when available
                    _LOGGER.error(
                        "Neato camera connection error for '%s': %s", self.entity_id, ex
                    )
                self._image = None
                self._image_url = None
                self._available = False
                return

            self._image = image.read()
            self._image_url = image_url
            self._generated_at = map_data.get("generated_at")
            self._available = True

    @property
    def available(self) -> bool:
        """Return if the robot is available."""
        return self._available

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return the state attributes of the vacuum cleaner."""
        data: dict[str, Any] = {}

        if self._generated_at is not None:
            data[ATTR_GENERATED_AT] = self._generated_at

        return data
|
||||
64
homeassistant/components/neato/config_flow.py
Normal file
64
homeassistant/components/neato/config_flow.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Config flow for Neato Botvac."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .const import NEATO_DOMAIN
|
||||
|
||||
|
||||
class OAuth2FlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=NEATO_DOMAIN
):
    """Config flow to handle Neato Botvac OAuth2 authentication."""

    DOMAIN = NEATO_DOMAIN

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Create an entry for the flow."""
        # Single-account integration: abort a second user-initiated flow,
        # but let a reauth flow proceed past the existing entry.
        current_entries = self._async_current_entries()
        if self.source != SOURCE_REAUTH and current_entries:
            # Already configured
            return self.async_abort(reason="already_configured")

        return await super().async_step_user(user_input=user_input)

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon migration of old entries."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauth upon migration of old entries."""
        # Show a confirmation form first; any submission restarts the
        # OAuth dance via the user step.
        if user_input is None:
            return self.async_show_form(step_id="reauth_confirm")
        return await self.async_step_user()

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Create an entry for the flow. Update an entry if one already exist."""
        current_entries = self._async_current_entries()
        if self.source == SOURCE_REAUTH and current_entries:
            # Update entry
            self.hass.config_entries.async_update_entry(
                current_entries[0], title=self.flow_impl.name, data=data
            )
            # Reload so the refreshed token is picked up immediately.
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(current_entries[0].entry_id)
            )
            return self.async_abort(reason="reauth_successful")
        return self.async_create_entry(title=self.flow_impl.name, data=data)
|
||||
150
homeassistant/components/neato/const.py
Normal file
150
homeassistant/components/neato/const.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Constants for Neato integration."""
|
||||
|
||||
NEATO_DOMAIN = "neato"

CONF_VENDOR = "vendor"
# Keys used in hass.data for cross-platform shared state.
NEATO_LOGIN = "neato_login"
NEATO_MAP_DATA = "neato_map_data"
NEATO_PERSISTENT_MAPS = "neato_persistent_maps"
NEATO_ROBOTS = "neato_robots"

SCAN_INTERVAL_MINUTES = 1

# Cleaning mode codes reported by the robot.
MODE = {1: "Eco", 2: "Turbo"}

# Robot action codes mapped to human-readable activity descriptions.
ACTION = {
    0: "Invalid",
    1: "House Cleaning",
    2: "Spot Cleaning",
    3: "Manual Cleaning",
    4: "Docking",
    5: "User Menu Active",
    6: "Suspended Cleaning",
    7: "Updating",
    8: "Copying logs",
    9: "Recovering Location",
    10: "IEC test",
    11: "Map cleaning",
    12: "Exploring map (creating a persistent map)",
    13: "Acquiring Persistent Map IDs",
    14: "Creating & Uploading Map",
    15: "Suspended Exploration",
}

# Error codes reported by the robot mapped to short user-facing messages.
ERRORS = {
    "ui_error_battery_battundervoltlithiumsafety": "Replace battery",
    "ui_error_battery_critical": "Replace battery",
    "ui_error_battery_invalidsensor": "Replace battery",
    "ui_error_battery_lithiumadapterfailure": "Replace battery",
    "ui_error_battery_mismatch": "Replace battery",
    "ui_error_battery_nothermistor": "Replace battery",
    "ui_error_battery_overtemp": "Replace battery",
    "ui_error_battery_overvolt": "Replace battery",
    "ui_error_battery_undercurrent": "Replace battery",
    "ui_error_battery_undertemp": "Replace battery",
    "ui_error_battery_undervolt": "Replace battery",
    "ui_error_battery_unplugged": "Replace battery",
    "ui_error_brush_stuck": "Brush stuck",
    "ui_error_brush_overloaded": "Brush overloaded",
    "ui_error_bumper_stuck": "Bumper stuck",
    "ui_error_check_battery_switch": "Check battery",
    "ui_error_corrupt_scb": "Call customer service corrupt board",
    "ui_error_deck_debris": "Deck debris",
    "ui_error_dflt_app": "Check Neato app",
    "ui_error_disconnect_chrg_cable": "Disconnected charge cable",
    "ui_error_disconnect_usb_cable": "Disconnected USB cable",
    "ui_error_dust_bin_missing": "Dust bin missing",
    "ui_error_dust_bin_full": "Dust bin full",
    "ui_error_dust_bin_emptied": "Dust bin emptied",
    "ui_error_hardware_failure": "Hardware failure",
    "ui_error_ldrop_stuck": "Clear my path",
    "ui_error_lds_jammed": "Clear my path",
    "ui_error_lds_bad_packets": "Check Neato app",
    "ui_error_lds_disconnected": "Check Neato app",
    "ui_error_lds_missed_packets": "Check Neato app",
    "ui_error_lwheel_stuck": "Clear my path",
    "ui_error_navigation_backdrop_frontbump": "Clear my path",
    "ui_error_navigation_backdrop_leftbump": "Clear my path",
    "ui_error_navigation_backdrop_wheelextended": "Clear my path",
    "ui_error_navigation_noprogress": "Clear my path",
    "ui_error_navigation_origin_unclean": "Clear my path",
    "ui_error_navigation_pathproblems": "Cannot return to base",
    "ui_error_navigation_pinkycommsfail": "Clear my path",
    "ui_error_navigation_falling": "Clear my path",
    "ui_error_navigation_noexitstogo": "Clear my path",
    "ui_error_navigation_nomotioncommands": "Clear my path",
    "ui_error_navigation_rightdrop_leftbump": "Clear my path",
    "ui_error_navigation_undockingfailed": "Clear my path",
    "ui_error_picked_up": "Picked up",
    "ui_error_qa_fail": "Check Neato app",
    "ui_error_rdrop_stuck": "Clear my path",
    "ui_error_reconnect_failed": "Reconnect failed",
    "ui_error_rwheel_stuck": "Clear my path",
    "ui_error_stuck": "Stuck!",
    "ui_error_unable_to_return_to_base": "Unable to return to base",
    "ui_error_unable_to_see": "Clean vacuum sensors",
    "ui_error_vacuum_slip": "Clear my path",
    "ui_error_vacuum_stuck": "Clear my path",
    "ui_error_warning": "Error check app",
    "batt_base_connect_fail": "Battery failed to connect to base",
    "batt_base_no_power": "Battery base has no power",
    "batt_low": "Battery low",
    "batt_on_base": "Battery on base",
    "clean_tilt_on_start": "Clean the tilt on start",
    "dustbin_full": "Dust bin full",
    "dustbin_missing": "Dust bin missing",
    "gen_picked_up": "Picked up",
    "hw_fail": "Hardware failure",
    "hw_tof_sensor_sensor": "Hardware sensor disconnected",
    "lds_bad_packets": "Bad packets",
    "lds_deck_debris": "Debris on deck",
    "lds_disconnected": "Disconnected",
    "lds_jammed": "Jammed",
    "lds_missed_packets": "Missed packets",
    "maint_brush_stuck": "Brush stuck",
    "maint_brush_overload": "Brush overloaded",
    "maint_bumper_stuck": "Bumper stuck",
    "maint_customer_support_qa": "Contact customer support",
    "maint_vacuum_stuck": "Vacuum is stuck",
    "maint_vacuum_slip": "Vacuum is stuck",
    "maint_left_drop_stuck": "Vacuum is stuck",
    "maint_left_wheel_stuck": "Vacuum is stuck",
    "maint_right_drop_stuck": "Vacuum is stuck",
    "maint_right_wheel_stuck": "Vacuum is stuck",
    "not_on_charge_base": "Not on the charge base",
    "nav_robot_falling": "Clear my path",
    "nav_no_path": "Clear my path",
    "nav_path_problem": "Clear my path",
    "nav_backdrop_frontbump": "Clear my path",
    "nav_backdrop_leftbump": "Clear my path",
    "nav_backdrop_wheelextended": "Clear my path",
    "nav_floorplan_zone_path_blocked": "Clear my path",
    "nav_mag_sensor": "Clear my path",
    "nav_no_exit": "Clear my path",
    "nav_no_movement": "Clear my path",
    "nav_rightdrop_leftbump": "Clear my path",
    "nav_undocking_failed": "Clear my path",
}

# Non-fatal alert codes mapped to short user-facing messages.
ALERTS = {
    "ui_alert_dust_bin_full": "Please empty dust bin",
    "ui_alert_recovering_location": "Returning to start",
    "ui_alert_battery_chargebasecommerr": "Battery error",
    "ui_alert_busy_charging": "Busy charging",
    "ui_alert_charging_base": "Base charging",
    "ui_alert_charging_power": "Charging power",
    "ui_alert_connect_chrg_cable": "Connect charge cable",
    "ui_alert_info_thank_you": "Thank you",
    "ui_alert_invalid": "Invalid check app",
    "ui_alert_old_error": "Old error",
    "ui_alert_swupdate_fail": "Update failed",
    "dustbin_full": "Please empty dust bin",
    "maint_brush_change": "Change the brush",
    "maint_filter_change": "Change the filter",
    "clean_completed_to_start": "Cleaning completed",
    "nav_floorplan_not_created": "No floorplan found",
    "nav_floorplan_load_fail": "Failed to load floorplan",
    "nav_floorplan_localization_fail": "Failed to load floorplan",
    "clean_incomplete_to_start": "Cleaning incomplete",
    "log_upload_failed": "Logs failed to upload",
}
|
||||
24
homeassistant/components/neato/entity.py
Normal file
24
homeassistant/components/neato/entity.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Base entity for Neato."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pybotvac import Robot
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import NEATO_DOMAIN
|
||||
|
||||
|
||||
class NeatoEntity(Entity):
    """Base Neato entity."""

    _attr_has_entity_name = True

    def __init__(self, robot: Robot) -> None:
        """Initialize Neato entity."""
        self.robot = robot
        # Every Neato entity is attached to a device keyed by robot serial.
        device = DeviceInfo(
            identifiers={(NEATO_DOMAIN, robot.serial)},
            name=robot.name,
        )
        self._attr_device_info: DeviceInfo = device
|
||||
50
homeassistant/components/neato/hub.py
Normal file
50
homeassistant/components/neato/hub.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Support for Neato botvac connected vacuum cleaners."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pybotvac import Account
|
||||
from urllib3.response import HTTPResponse
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import NEATO_MAP_DATA, NEATO_PERSISTENT_MAPS, NEATO_ROBOTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NeatoHub:
    """A My Neato hub wrapper class."""

    def __init__(self, hass: HomeAssistant, neato: Account) -> None:
        """Initialize the Neato hub."""
        self._hass = hass
        self.my_neato: Account = neato

    # Throttled: callers may invoke freely; the API is hit at most once/minute.
    @Throttle(timedelta(minutes=1))
    def update_robots(self) -> None:
        """Update the robot states."""
        _LOGGER.debug("Running HUB.update_robots %s", self._hass.data.get(NEATO_ROBOTS))
        # Publish fresh robot/map data for the entity platforms to read.
        self._hass.data[NEATO_ROBOTS] = self.my_neato.robots
        self._hass.data[NEATO_PERSISTENT_MAPS] = self.my_neato.persistent_maps
        self._hass.data[NEATO_MAP_DATA] = self.my_neato.maps

    def download_map(self, url: str) -> HTTPResponse:
        """Download a new map image."""
        map_image_data: HTTPResponse = self.my_neato.get_map_image(url)
        return map_image_data

    async def async_update_entry_unique_id(self, entry: ConfigEntry) -> str:
        """Update entry for unique_id."""

        # refresh_userdata is a blocking pybotvac call; run in executor.
        await self._hass.async_add_executor_job(self.my_neato.refresh_userdata)
        unique_id: str = self.my_neato.unique_id

        if entry.unique_id == unique_id:
            return unique_id

        _LOGGER.debug("Updating user unique_id for previous config entry")
        self._hass.config_entries.async_update_entry(entry, unique_id=unique_id)
        return unique_id
|
||||
7
homeassistant/components/neato/icons.json
Normal file
7
homeassistant/components/neato/icons.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"services": {
|
||||
"custom_cleaning": {
|
||||
"service": "mdi:broom"
|
||||
}
|
||||
}
|
||||
}
|
||||
11
homeassistant/components/neato/manifest.json
Normal file
11
homeassistant/components/neato/manifest.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"domain": "neato",
|
||||
"name": "Neato Botvac",
|
||||
"codeowners": [],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/neato",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pybotvac"],
|
||||
"requirements": ["pybotvac==0.0.28"]
|
||||
}
|
||||
81
homeassistant/components/neato/sensor.py
Normal file
81
homeassistant/components/neato/sensor.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Support for Neato sensors."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pybotvac.exceptions import NeatoRobotException
|
||||
from pybotvac.robot import Robot
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES
|
||||
from .entity import NeatoEntity
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
|
||||
|
||||
BATTERY = "Battery"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Neato sensor using config entry."""
|
||||
neato: NeatoHub = hass.data[NEATO_LOGIN]
|
||||
dev = [NeatoSensor(neato, robot) for robot in hass.data[NEATO_ROBOTS]]
|
||||
|
||||
if not dev:
|
||||
return
|
||||
|
||||
_LOGGER.debug("Adding robots for sensors %s", dev)
|
||||
async_add_entities(dev, True)
|
||||
|
||||
|
||||
class NeatoSensor(NeatoEntity, SensorEntity):
|
||||
"""Neato sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.BATTERY
|
||||
_attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
_attr_available: bool = False
|
||||
|
||||
def __init__(self, neato: NeatoHub, robot: Robot) -> None:
|
||||
"""Initialize Neato sensor."""
|
||||
super().__init__(robot)
|
||||
self._robot_serial: str = self.robot.serial
|
||||
self._attr_unique_id = self.robot.serial
|
||||
self._state: dict[str, Any] | None = None
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update Neato Sensor."""
|
||||
try:
|
||||
self._state = self.robot.state
|
||||
except NeatoRobotException as ex:
|
||||
if self._attr_available:
|
||||
_LOGGER.error(
|
||||
"Neato sensor connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
self._state = None
|
||||
self._attr_available = False
|
||||
return
|
||||
|
||||
self._attr_available = True
|
||||
_LOGGER.debug("self._state=%s", self._state)
|
||||
|
||||
@property
|
||||
def native_value(self) -> str | None:
|
||||
"""Return the state."""
|
||||
if self._state is not None:
|
||||
return str(self._state["details"]["charge"])
|
||||
return None
|
||||
32
homeassistant/components/neato/services.yaml
Normal file
32
homeassistant/components/neato/services.yaml
Normal file
@@ -0,0 +1,32 @@
|
||||
custom_cleaning:
|
||||
target:
|
||||
entity:
|
||||
integration: neato
|
||||
domain: vacuum
|
||||
fields:
|
||||
mode:
|
||||
default: 2
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 2
|
||||
mode: box
|
||||
navigation:
|
||||
default: 1
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 3
|
||||
mode: box
|
||||
category:
|
||||
default: 4
|
||||
selector:
|
||||
number:
|
||||
min: 2
|
||||
max: 4
|
||||
step: 2
|
||||
mode: box
|
||||
zone:
|
||||
example: "Kitchen"
|
||||
selector:
|
||||
text:
|
||||
73
homeassistant/components/neato/strings.json
Normal file
73
homeassistant/components/neato/strings.json
Normal file
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
},
|
||||
"step": {
|
||||
"pick_implementation": {
|
||||
"data": {
|
||||
"implementation": "[%key:common::config_flow::data::implementation%]"
|
||||
},
|
||||
"data_description": {
|
||||
"implementation": "[%key:common::config_flow::description::implementation%]"
|
||||
},
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::description::confirm_setup%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"button": {
|
||||
"dismiss_alert": {
|
||||
"name": "Dismiss alert"
|
||||
}
|
||||
},
|
||||
"camera": {
|
||||
"cleaning_map": {
|
||||
"name": "Cleaning map"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"schedule": {
|
||||
"name": "Schedule"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"custom_cleaning": {
|
||||
"description": "Starts a custom cleaning of your house.",
|
||||
"fields": {
|
||||
"category": {
|
||||
"description": "Whether to use a persistent map or not for cleaning (i.e. No go lines): 2 for no map, 4 for map. Default to using map if not set (and fallback to no map if no map is found).",
|
||||
"name": "Use cleaning map"
|
||||
},
|
||||
"mode": {
|
||||
"description": "Sets the cleaning mode: 1 for eco and 2 for turbo. Defaults to turbo if not set.",
|
||||
"name": "Cleaning mode"
|
||||
},
|
||||
"navigation": {
|
||||
"description": "Sets the navigation mode: 1 for normal, 2 for extra care, 3 for deep. Defaults to normal if not set.",
|
||||
"name": "Navigation mode"
|
||||
},
|
||||
"zone": {
|
||||
"description": "Name of the zone to clean (only supported on the Botvac D7). Defaults to no zone i.e. complete house cleanup.",
|
||||
"name": "Zone"
|
||||
}
|
||||
},
|
||||
"name": "Custom cleaning"
|
||||
}
|
||||
}
|
||||
}
|
||||
118
homeassistant/components/neato/switch.py
Normal file
118
homeassistant/components/neato/switch.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""Support for Neato Connected Vacuums switches."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pybotvac.exceptions import NeatoRobotException
|
||||
from pybotvac.robot import Robot
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_OFF, STATE_ON, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_LOGIN, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES
|
||||
from .entity import NeatoEntity
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
|
||||
|
||||
SWITCH_TYPE_SCHEDULE = "schedule"
|
||||
|
||||
SWITCH_TYPES = {SWITCH_TYPE_SCHEDULE: ["Schedule"]}
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Neato switch with config entry."""
|
||||
neato: NeatoHub = hass.data[NEATO_LOGIN]
|
||||
dev = [
|
||||
NeatoConnectedSwitch(neato, robot, type_name)
|
||||
for robot in hass.data[NEATO_ROBOTS]
|
||||
for type_name in SWITCH_TYPES
|
||||
]
|
||||
|
||||
if not dev:
|
||||
return
|
||||
|
||||
_LOGGER.debug("Adding switches %s", dev)
|
||||
async_add_entities(dev, True)
|
||||
|
||||
|
||||
class NeatoConnectedSwitch(NeatoEntity, SwitchEntity):
|
||||
"""Neato Connected Switches."""
|
||||
|
||||
_attr_translation_key = "schedule"
|
||||
_attr_available = False
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
|
||||
def __init__(self, neato: NeatoHub, robot: Robot, switch_type: str) -> None:
|
||||
"""Initialize the Neato Connected switches."""
|
||||
super().__init__(robot)
|
||||
self.type = switch_type
|
||||
self._state: dict[str, Any] | None = None
|
||||
self._schedule_state: str | None = None
|
||||
self._clean_state = None
|
||||
self._attr_unique_id = self.robot.serial
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the states of Neato switches."""
|
||||
_LOGGER.debug("Running Neato switch update for '%s'", self.entity_id)
|
||||
try:
|
||||
self._state = self.robot.state
|
||||
except NeatoRobotException as ex:
|
||||
if self._attr_available: # Print only once when available
|
||||
_LOGGER.error(
|
||||
"Neato switch connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
self._state = None
|
||||
self._attr_available = False
|
||||
return
|
||||
|
||||
self._attr_available = True
|
||||
_LOGGER.debug("self._state=%s", self._state)
|
||||
if self.type == SWITCH_TYPE_SCHEDULE:
|
||||
_LOGGER.debug("State: %s", self._state)
|
||||
if self._state is not None and self._state["details"]["isScheduleEnabled"]:
|
||||
self._schedule_state = STATE_ON
|
||||
else:
|
||||
self._schedule_state = STATE_OFF
|
||||
_LOGGER.debug(
|
||||
"Schedule state for '%s': %s", self.entity_id, self._schedule_state
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return bool(
|
||||
self.type == SWITCH_TYPE_SCHEDULE and self._schedule_state == STATE_ON
|
||||
)
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
if self.type == SWITCH_TYPE_SCHEDULE:
|
||||
try:
|
||||
self.robot.enable_schedule()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato switch connection error '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
if self.type == SWITCH_TYPE_SCHEDULE:
|
||||
try:
|
||||
self.robot.disable_schedule()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato switch connection error '%s': %s", self.entity_id, ex
|
||||
)
|
||||
388
homeassistant/components/neato/vacuum.py
Normal file
388
homeassistant/components/neato/vacuum.py
Normal file
@@ -0,0 +1,388 @@
|
||||
"""Support for Neato Connected Vacuums."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pybotvac import Robot
|
||||
from pybotvac.exceptions import NeatoRobotException
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.vacuum import (
|
||||
ATTR_STATUS,
|
||||
StateVacuumEntity,
|
||||
VacuumActivity,
|
||||
VacuumEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_MODE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
ACTION,
|
||||
ALERTS,
|
||||
ERRORS,
|
||||
MODE,
|
||||
NEATO_LOGIN,
|
||||
NEATO_MAP_DATA,
|
||||
NEATO_PERSISTENT_MAPS,
|
||||
NEATO_ROBOTS,
|
||||
SCAN_INTERVAL_MINUTES,
|
||||
)
|
||||
from .entity import NeatoEntity
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
|
||||
|
||||
ATTR_CLEAN_START = "clean_start"
|
||||
ATTR_CLEAN_STOP = "clean_stop"
|
||||
ATTR_CLEAN_AREA = "clean_area"
|
||||
ATTR_CLEAN_BATTERY_START = "battery_level_at_clean_start"
|
||||
ATTR_CLEAN_BATTERY_END = "battery_level_at_clean_end"
|
||||
ATTR_CLEAN_SUSP_COUNT = "clean_suspension_count"
|
||||
ATTR_CLEAN_SUSP_TIME = "clean_suspension_time"
|
||||
ATTR_CLEAN_PAUSE_TIME = "clean_pause_time"
|
||||
ATTR_CLEAN_ERROR_TIME = "clean_error_time"
|
||||
ATTR_LAUNCHED_FROM = "launched_from"
|
||||
|
||||
ATTR_NAVIGATION = "navigation"
|
||||
ATTR_CATEGORY = "category"
|
||||
ATTR_ZONE = "zone"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Neato vacuum with config entry."""
|
||||
neato: NeatoHub = hass.data[NEATO_LOGIN]
|
||||
mapdata: dict[str, Any] | None = hass.data.get(NEATO_MAP_DATA)
|
||||
persistent_maps: dict[str, Any] | None = hass.data.get(NEATO_PERSISTENT_MAPS)
|
||||
dev = [
|
||||
NeatoConnectedVacuum(neato, robot, mapdata, persistent_maps)
|
||||
for robot in hass.data[NEATO_ROBOTS]
|
||||
]
|
||||
|
||||
if not dev:
|
||||
return
|
||||
|
||||
_LOGGER.debug("Adding vacuums %s", dev)
|
||||
async_add_entities(dev, True)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
assert platform is not None
|
||||
|
||||
platform.async_register_entity_service(
|
||||
"custom_cleaning",
|
||||
{
|
||||
vol.Optional(ATTR_MODE, default=2): cv.positive_int,
|
||||
vol.Optional(ATTR_NAVIGATION, default=1): cv.positive_int,
|
||||
vol.Optional(ATTR_CATEGORY, default=4): cv.positive_int,
|
||||
vol.Optional(ATTR_ZONE): cv.string,
|
||||
},
|
||||
"neato_custom_cleaning",
|
||||
)
|
||||
|
||||
|
||||
class NeatoConnectedVacuum(NeatoEntity, StateVacuumEntity):
|
||||
"""Representation of a Neato Connected Vacuum."""
|
||||
|
||||
_attr_supported_features = (
|
||||
VacuumEntityFeature.BATTERY
|
||||
| VacuumEntityFeature.PAUSE
|
||||
| VacuumEntityFeature.RETURN_HOME
|
||||
| VacuumEntityFeature.STOP
|
||||
| VacuumEntityFeature.START
|
||||
| VacuumEntityFeature.CLEAN_SPOT
|
||||
| VacuumEntityFeature.STATE
|
||||
| VacuumEntityFeature.MAP
|
||||
| VacuumEntityFeature.LOCATE
|
||||
)
|
||||
_attr_name = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
neato: NeatoHub,
|
||||
robot: Robot,
|
||||
mapdata: dict[str, Any] | None,
|
||||
persistent_maps: dict[str, Any] | None,
|
||||
) -> None:
|
||||
"""Initialize the Neato Connected Vacuum."""
|
||||
super().__init__(robot)
|
||||
self._attr_available: bool = neato is not None
|
||||
self._mapdata = mapdata
|
||||
self._robot_has_map: bool = self.robot.has_persistent_maps
|
||||
self._robot_maps = persistent_maps
|
||||
self._robot_serial: str = self.robot.serial
|
||||
self._attr_unique_id: str = self.robot.serial
|
||||
self._status_state: str | None = None
|
||||
self._state: dict[str, Any] | None = None
|
||||
self._clean_time_start: str | None = None
|
||||
self._clean_time_stop: str | None = None
|
||||
self._clean_area: float | None = None
|
||||
self._clean_battery_start: int | None = None
|
||||
self._clean_battery_end: int | None = None
|
||||
self._clean_susp_charge_count: int | None = None
|
||||
self._clean_susp_time: int | None = None
|
||||
self._clean_pause_time: int | None = None
|
||||
self._clean_error_time: int | None = None
|
||||
self._launched_from: str | None = None
|
||||
self._robot_boundaries: list = []
|
||||
self._robot_stats: dict[str, Any] | None = None
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the states of Neato Vacuums."""
|
||||
_LOGGER.debug("Running Neato Vacuums update for '%s'", self.entity_id)
|
||||
try:
|
||||
if self._robot_stats is None:
|
||||
self._robot_stats = self.robot.get_general_info().json().get("data")
|
||||
except NeatoRobotException:
|
||||
_LOGGER.warning("Couldn't fetch robot information of %s", self.entity_id)
|
||||
|
||||
try:
|
||||
self._state = self.robot.state
|
||||
except NeatoRobotException as ex:
|
||||
if self._attr_available: # print only once when available
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
self._state = None
|
||||
self._attr_available = False
|
||||
return
|
||||
|
||||
if self._state is None:
|
||||
return
|
||||
self._attr_available = True
|
||||
_LOGGER.debug("self._state=%s", self._state)
|
||||
if "alert" in self._state:
|
||||
robot_alert = ALERTS.get(self._state["alert"])
|
||||
else:
|
||||
robot_alert = None
|
||||
if self._state["state"] == 1:
|
||||
if self._state["details"]["isCharging"]:
|
||||
self._attr_activity = VacuumActivity.DOCKED
|
||||
self._status_state = "Charging"
|
||||
elif (
|
||||
self._state["details"]["isDocked"]
|
||||
and not self._state["details"]["isCharging"]
|
||||
):
|
||||
self._attr_activity = VacuumActivity.DOCKED
|
||||
self._status_state = "Docked"
|
||||
else:
|
||||
self._attr_activity = VacuumActivity.IDLE
|
||||
self._status_state = "Stopped"
|
||||
|
||||
if robot_alert is not None:
|
||||
self._status_state = robot_alert
|
||||
elif self._state["state"] == 2:
|
||||
if robot_alert is None:
|
||||
self._attr_activity = VacuumActivity.CLEANING
|
||||
self._status_state = (
|
||||
f"{MODE.get(self._state['cleaning']['mode'])} "
|
||||
f"{ACTION.get(self._state['action'])}"
|
||||
)
|
||||
if (
|
||||
"boundary" in self._state["cleaning"]
|
||||
and "name" in self._state["cleaning"]["boundary"]
|
||||
):
|
||||
self._status_state += (
|
||||
f" {self._state['cleaning']['boundary']['name']}"
|
||||
)
|
||||
else:
|
||||
self._status_state = robot_alert
|
||||
elif self._state["state"] == 3:
|
||||
self._attr_activity = VacuumActivity.PAUSED
|
||||
self._status_state = "Paused"
|
||||
elif self._state["state"] == 4:
|
||||
self._attr_activity = VacuumActivity.ERROR
|
||||
self._status_state = ERRORS.get(self._state["error"])
|
||||
|
||||
self._attr_battery_level = self._state["details"]["charge"]
|
||||
|
||||
if self._mapdata is None or not self._mapdata.get(self._robot_serial, {}).get(
|
||||
"maps", []
|
||||
):
|
||||
return
|
||||
|
||||
mapdata: dict[str, Any] = self._mapdata[self._robot_serial]["maps"][0]
|
||||
self._clean_time_start = mapdata["start_at"]
|
||||
self._clean_time_stop = mapdata["end_at"]
|
||||
self._clean_area = mapdata["cleaned_area"]
|
||||
self._clean_susp_charge_count = mapdata["suspended_cleaning_charging_count"]
|
||||
self._clean_susp_time = mapdata["time_in_suspended_cleaning"]
|
||||
self._clean_pause_time = mapdata["time_in_pause"]
|
||||
self._clean_error_time = mapdata["time_in_error"]
|
||||
self._clean_battery_start = mapdata["run_charge_at_start"]
|
||||
self._clean_battery_end = mapdata["run_charge_at_end"]
|
||||
self._launched_from = mapdata["launched_from"]
|
||||
|
||||
if (
|
||||
self._robot_has_map
|
||||
and self._state
|
||||
and self._state["availableServices"]["maps"] != "basic-1"
|
||||
and self._robot_maps
|
||||
):
|
||||
allmaps: dict = self._robot_maps[self._robot_serial]
|
||||
_LOGGER.debug(
|
||||
"Found the following maps for '%s': %s", self.entity_id, allmaps
|
||||
)
|
||||
self._robot_boundaries = [] # Reset boundaries before refreshing boundaries
|
||||
for maps in allmaps:
|
||||
try:
|
||||
robot_boundaries = self.robot.get_map_boundaries(maps["id"]).json()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Could not fetch map boundaries for '%s': %s",
|
||||
self.entity_id,
|
||||
ex,
|
||||
)
|
||||
return
|
||||
|
||||
_LOGGER.debug(
|
||||
"Boundaries for robot '%s' in map '%s': %s",
|
||||
self.entity_id,
|
||||
maps["name"],
|
||||
robot_boundaries,
|
||||
)
|
||||
if "boundaries" in robot_boundaries["data"]:
|
||||
self._robot_boundaries += robot_boundaries["data"]["boundaries"]
|
||||
_LOGGER.debug(
|
||||
"List of boundaries for '%s': %s",
|
||||
self.entity_id,
|
||||
self._robot_boundaries,
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the vacuum cleaner."""
|
||||
data: dict[str, Any] = {}
|
||||
|
||||
if self._status_state is not None:
|
||||
data[ATTR_STATUS] = self._status_state
|
||||
if self._clean_time_start is not None:
|
||||
data[ATTR_CLEAN_START] = self._clean_time_start
|
||||
if self._clean_time_stop is not None:
|
||||
data[ATTR_CLEAN_STOP] = self._clean_time_stop
|
||||
if self._clean_area is not None:
|
||||
data[ATTR_CLEAN_AREA] = self._clean_area
|
||||
if self._clean_susp_charge_count is not None:
|
||||
data[ATTR_CLEAN_SUSP_COUNT] = self._clean_susp_charge_count
|
||||
if self._clean_susp_time is not None:
|
||||
data[ATTR_CLEAN_SUSP_TIME] = self._clean_susp_time
|
||||
if self._clean_pause_time is not None:
|
||||
data[ATTR_CLEAN_PAUSE_TIME] = self._clean_pause_time
|
||||
if self._clean_error_time is not None:
|
||||
data[ATTR_CLEAN_ERROR_TIME] = self._clean_error_time
|
||||
if self._clean_battery_start is not None:
|
||||
data[ATTR_CLEAN_BATTERY_START] = self._clean_battery_start
|
||||
if self._clean_battery_end is not None:
|
||||
data[ATTR_CLEAN_BATTERY_END] = self._clean_battery_end
|
||||
if self._launched_from is not None:
|
||||
data[ATTR_LAUNCHED_FROM] = self._launched_from
|
||||
|
||||
return data
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Device info for neato robot."""
|
||||
device_info = self._attr_device_info
|
||||
if self._robot_stats:
|
||||
device_info["manufacturer"] = self._robot_stats["battery"]["vendor"]
|
||||
device_info["model"] = self._robot_stats["model"]
|
||||
device_info["sw_version"] = self._robot_stats["firmware"]
|
||||
return device_info
|
||||
|
||||
def start(self) -> None:
|
||||
"""Start cleaning or resume cleaning."""
|
||||
if self._state:
|
||||
try:
|
||||
if self._state["state"] == 1:
|
||||
self.robot.start_cleaning()
|
||||
elif self._state["state"] == 3:
|
||||
self.robot.resume_cleaning()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def pause(self) -> None:
|
||||
"""Pause the vacuum."""
|
||||
try:
|
||||
self.robot.pause_cleaning()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def return_to_base(self, **kwargs: Any) -> None:
|
||||
"""Set the vacuum cleaner to return to the dock."""
|
||||
try:
|
||||
if self._attr_activity == VacuumActivity.CLEANING:
|
||||
self.robot.pause_cleaning()
|
||||
self._attr_activity = VacuumActivity.RETURNING
|
||||
self.robot.send_to_base()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def stop(self, **kwargs: Any) -> None:
|
||||
"""Stop the vacuum cleaner."""
|
||||
try:
|
||||
self.robot.stop_cleaning()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def locate(self, **kwargs: Any) -> None:
|
||||
"""Locate the robot by making it emit a sound."""
|
||||
try:
|
||||
self.robot.locate()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def clean_spot(self, **kwargs: Any) -> None:
|
||||
"""Run a spot cleaning starting from the base."""
|
||||
try:
|
||||
self.robot.start_spot_cleaning()
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
|
||||
def neato_custom_cleaning(
|
||||
self, mode: str, navigation: str, category: str, zone: str | None = None
|
||||
) -> None:
|
||||
"""Zone cleaning service call."""
|
||||
boundary_id = None
|
||||
if zone is not None:
|
||||
for boundary in self._robot_boundaries:
|
||||
if zone in boundary["name"]:
|
||||
boundary_id = boundary["id"]
|
||||
if boundary_id is None:
|
||||
_LOGGER.error(
|
||||
"Zone '%s' was not found for the robot '%s'", zone, self.entity_id
|
||||
)
|
||||
return
|
||||
_LOGGER.debug(
|
||||
"Start cleaning zone '%s' with robot %s", zone, self.entity_id
|
||||
)
|
||||
|
||||
self._attr_activity = VacuumActivity.CLEANING
|
||||
try:
|
||||
self.robot.start_cleaning(mode, navigation, category, boundary_id)
|
||||
except NeatoRobotException as ex:
|
||||
_LOGGER.error(
|
||||
"Neato vacuum connection error for '%s': %s", self.entity_id, ex
|
||||
)
|
||||
@@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pynintendoparental"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pynintendoparental==1.1.2"]
|
||||
"requirements": ["pynintendoparental==1.1.3"]
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ from onedrive_personal_sdk.exceptions import (
|
||||
NotFoundError,
|
||||
OneDriveException,
|
||||
)
|
||||
from onedrive_personal_sdk.models.items import Item, ItemUpdate
|
||||
from onedrive_personal_sdk.models.items import ItemUpdate
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -202,9 +202,7 @@ async def _get_onedrive_client(
|
||||
)
|
||||
|
||||
|
||||
async def _handle_item_operation(
|
||||
func: Callable[[], Awaitable[Item]], folder: str
|
||||
) -> Item:
|
||||
async def _handle_item_operation[T](func: Callable[[], Awaitable[T]], folder: str) -> T:
|
||||
try:
|
||||
return await func()
|
||||
except NotFoundError:
|
||||
|
||||
@@ -10,5 +10,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["onedrive_personal_sdk"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["onedrive-personal-sdk==0.0.14"]
|
||||
"requirements": ["onedrive-personal-sdk==0.0.15"]
|
||||
}
|
||||
|
||||
@@ -9,5 +9,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["openai==2.2.0", "python-open-router==0.3.1"]
|
||||
"requirements": ["openai==2.2.0", "python-open-router==0.3.2"]
|
||||
}
|
||||
|
||||
@@ -229,7 +229,7 @@ class OverkizConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the local authentication step via config flow."""
|
||||
errors = {}
|
||||
description_placeholders = {
|
||||
"somfy-developer-mode-docs": "https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started"
|
||||
"somfy_developer_mode_docs": "https://github.com/Somfy-Developer/Somfy-TaHoma-Developer-Mode#getting-started"
|
||||
}
|
||||
|
||||
if user_input:
|
||||
|
||||
@@ -41,7 +41,7 @@
|
||||
"token": "Token generated by the app used to control your device.",
|
||||
"verify_ssl": "Verify the SSL certificate. Select this only if you are connecting via the hostname."
|
||||
},
|
||||
"description": "By activating the [Developer Mode of your TaHoma box]({somfy-developer-mode-docs}), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway."
|
||||
"description": "By activating the [Developer Mode of your TaHoma box]({somfy_developer_mode_docs}), you can authorize third-party software (like Home Assistant) to connect to it via your local network.\n\n1. Open the TaHoma By Somfy application on your device.\n2. Navigate to the Help & advanced features -> Advanced features menu in the application.\n3. Activate Developer Mode by tapping 7 times on the version number of your gateway (like 2025.1.4-11).\n4. Generate a token from the Developer Mode menu to authenticate your API calls.\n\n5. Enter the generated token below and update the host to include your Gateway PIN or the IP address of your gateway."
|
||||
},
|
||||
"local_or_cloud": {
|
||||
"data": {
|
||||
|
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
|
||||
from abc import abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
@@ -175,10 +174,6 @@ class PlaystationNetworkGroupsUpdateCoordinator(
|
||||
}
|
||||
)
|
||||
except PSNAWPForbiddenError as e:
|
||||
try:
|
||||
error = json.loads(e.args[0])
|
||||
except json.JSONDecodeError as err:
|
||||
raise PSNAWPServerError from err
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
@@ -189,7 +184,7 @@ class PlaystationNetworkGroupsUpdateCoordinator(
|
||||
translation_key="group_chat_forbidden",
|
||||
translation_placeholders={
|
||||
CONF_NAME: self.config_entry.title,
|
||||
"error_message": error["error"]["message"],
|
||||
"error_message": e.message or "",
|
||||
},
|
||||
)
|
||||
await self.async_shutdown()
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user