mirror of
https://github.com/home-assistant/core.git
synced 2025-11-07 01:50:18 +00:00
Compare commits
144 Commits
cursor/add
...
fix_octopr
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c83c555f2c | ||
|
|
7d65b4c941 | ||
|
|
abd0ee7bce | ||
|
|
9e3eb20a04 | ||
|
|
6dc655c3b4 | ||
|
|
9f595a94fb | ||
|
|
5dc215a143 | ||
|
|
306b78ba5f | ||
|
|
bccb646a07 | ||
|
|
4a5dc8cdd6 | ||
|
|
52a751507a | ||
|
|
533b9f969d | ||
|
|
5de7928bc0 | ||
|
|
aad9b07f86 | ||
|
|
3e2c401253 | ||
|
|
762e63d042 | ||
|
|
ec6d40a51c | ||
|
|
47c2c61626 | ||
|
|
73c941f6c5 | ||
|
|
685edb5f76 | ||
|
|
5987b6dcb9 | ||
|
|
cb029e0bb0 | ||
|
|
553ec35947 | ||
|
|
f93940bfa9 | ||
|
|
486f93eb28 | ||
|
|
462db36fef | ||
|
|
485f7f45e8 | ||
|
|
a446d8a98c | ||
|
|
b4a31fc578 | ||
|
|
22321c22cc | ||
|
|
4419c236e2 | ||
|
|
1731a2534c | ||
|
|
ec0edf47b1 | ||
|
|
57c69738e3 | ||
|
|
fb1f258b2b | ||
|
|
d419dd0c05 | ||
|
|
65960aa3f7 | ||
|
|
a25afe2834 | ||
|
|
4cdfa3bddb | ||
|
|
9e7bef9fa7 | ||
|
|
68a1b1f91f | ||
|
|
1659ca532d | ||
|
|
8ea16daae4 | ||
|
|
5bd89acf9a | ||
|
|
2b8db74be4 | ||
|
|
d7f9a7114d | ||
|
|
f7a59eb86e | ||
|
|
37eef965ad | ||
|
|
b706430e66 | ||
|
|
5012aa5cb0 | ||
|
|
1c5f7adf4e | ||
|
|
ff364e3913 | ||
|
|
0e2a4605ff | ||
|
|
ca5b9ce0d3 | ||
|
|
953196ec21 | ||
|
|
b5be3d5ac3 | ||
|
|
5d9e8287d3 | ||
|
|
dc291708ae | ||
|
|
257e82fe4e | ||
|
|
ab6d4d645e | ||
|
|
58ebd84326 | ||
|
|
76b24dafed | ||
|
|
431f563ff6 | ||
|
|
e308e610c6 | ||
|
|
5e77cbd185 | ||
|
|
2dbc7ff4b7 | ||
|
|
49a6c5776d | ||
|
|
98f6001c9c | ||
|
|
ce38a93177 | ||
|
|
92fbf468f2 | ||
|
|
e09ec4a6f3 | ||
|
|
db63e0c829 | ||
|
|
8ed88d4a58 | ||
|
|
d098ada777 | ||
|
|
1add999c5a | ||
|
|
fad217837f | ||
|
|
983af1af7b | ||
|
|
bcf2c4e9b6 | ||
|
|
c72f2fd546 | ||
|
|
f54864a476 | ||
|
|
fe1ff456c6 | ||
|
|
ec25ead5ac | ||
|
|
e8277cb67c | ||
|
|
da0fb37a20 | ||
|
|
28675eee33 | ||
|
|
84561cbc41 | ||
|
|
4e48c881aa | ||
|
|
af8cd0414b | ||
|
|
f54076da29 | ||
|
|
1d0eb97592 | ||
|
|
57f1c268ef | ||
|
|
01402e4f96 | ||
|
|
6137a643d8 | ||
|
|
1badfe3aff | ||
|
|
a549104fe1 | ||
|
|
2aab2ddc55 | ||
|
|
42e01362a5 | ||
|
|
c3cf24ba25 | ||
|
|
7809fb6a9b | ||
|
|
144fc2a443 | ||
|
|
c67e005b2c | ||
|
|
1c6913eec2 | ||
|
|
fb5c4a1375 | ||
|
|
60b8392478 | ||
|
|
7145fb96dd | ||
|
|
37d94aca6d | ||
|
|
9b697edfca | ||
|
|
22e30be946 | ||
|
|
bc9d35b85f | ||
|
|
4dfb6e4983 | ||
|
|
09d78ab5ad | ||
|
|
b2ebdb7ef0 | ||
|
|
83d6a30b2e | ||
|
|
19dee6d22a | ||
|
|
afd27630fb | ||
|
|
cad1f1da1d | ||
|
|
cd62bd86fd | ||
|
|
79c3bc9eca | ||
|
|
10439eea4b | ||
|
|
75cc866e72 | ||
|
|
8b2ca6c571 | ||
|
|
52db73e8e3 | ||
|
|
79d15ec91c | ||
|
|
5af91df2b9 | ||
|
|
89a85c3d8c | ||
|
|
e44c6391b1 | ||
|
|
99d3234855 | ||
|
|
32cc5123f5 | ||
|
|
93415175bb | ||
|
|
f04bb69dbc | ||
|
|
9f8c9940bd | ||
|
|
496f527dff | ||
|
|
385e6f58a8 | ||
|
|
c8c37ad628 | ||
|
|
cc57732e24 | ||
|
|
6011df8952 | ||
|
|
08e494aba5 | ||
|
|
77c428e4c7 | ||
|
|
c22a2b93fa | ||
|
|
7f84363bf4 | ||
|
|
0980c3a270 | ||
|
|
7cec3aa27c | ||
|
|
1ddb39f6d0 | ||
|
|
10d2e38315 |
4
.github/workflows/builder.yml
vendored
4
.github/workflows/builder.yml
vendored
@@ -88,6 +88,10 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
exclude:
|
||||
- arch: armv7
|
||||
- arch: armhf
|
||||
- arch: i386
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
|
||||
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
|
||||
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
@@ -362,6 +362,7 @@ homeassistant.components.myuplink.*
|
||||
homeassistant.components.nam.*
|
||||
homeassistant.components.nanoleaf.*
|
||||
homeassistant.components.nasweb.*
|
||||
homeassistant.components.neato.*
|
||||
homeassistant.components.nest.*
|
||||
homeassistant.components.netatmo.*
|
||||
homeassistant.components.network.*
|
||||
|
||||
4
CODEOWNERS
generated
4
CODEOWNERS
generated
@@ -1817,8 +1817,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/ws66i/ @ssaenger
|
||||
/homeassistant/components/wyoming/ @synesthesiam
|
||||
/tests/components/wyoming/ @synesthesiam
|
||||
/homeassistant/components/xbox/ @hunterjm
|
||||
/tests/components/xbox/ @hunterjm
|
||||
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
|
||||
/tests/components/xbox/ @hunterjm @tr4nt0r
|
||||
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
|
||||
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
|
||||
cosign:
|
||||
base_identity: https://github.com/home-assistant/docker/.*
|
||||
identity: https://github.com/home-assistant/core/.*
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
{
|
||||
"domain": "yale",
|
||||
"name": "Yale",
|
||||
"integrations": [
|
||||
"august",
|
||||
"yale_smart_alarm",
|
||||
"yalexs_ble",
|
||||
"yale_home",
|
||||
"yale"
|
||||
]
|
||||
"name": "Yale (non-US/Canada)",
|
||||
"integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
|
||||
}
|
||||
|
||||
5
homeassistant/brands/yale_august.json
Normal file
5
homeassistant/brands/yale_august.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"domain": "yale_august",
|
||||
"name": "Yale August (US/Canada)",
|
||||
"integrations": ["august", "august_ble"]
|
||||
}
|
||||
@@ -30,6 +30,7 @@ generate_data:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
multiple: true
|
||||
generate_image:
|
||||
fields:
|
||||
task_name:
|
||||
@@ -57,3 +58,4 @@ generate_image:
|
||||
media:
|
||||
accept:
|
||||
- "*"
|
||||
multiple: true
|
||||
|
||||
@@ -58,7 +58,10 @@ from homeassistant.const import (
|
||||
from homeassistant.helpers import network
|
||||
from homeassistant.util import color as color_util, dt as dt_util
|
||||
from homeassistant.util.decorator import Registry
|
||||
from homeassistant.util.unit_conversion import TemperatureConverter
|
||||
from homeassistant.util.unit_conversion import (
|
||||
TemperatureConverter,
|
||||
TemperatureDeltaConverter,
|
||||
)
|
||||
|
||||
from .config import AbstractConfig
|
||||
from .const import (
|
||||
@@ -844,7 +847,7 @@ def temperature_from_object(
|
||||
temp -= 273.15
|
||||
|
||||
if interval:
|
||||
return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
|
||||
return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
|
||||
return TemperatureConverter.convert(temp, from_unit, to_unit)
|
||||
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==6.5.5"]
|
||||
"requirements": ["aioamazondevices==6.5.6"]
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="daily_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
key=TYPE_LIGHTNING_PER_DAY,
|
||||
translation_key="lightning_strikes_per_day",
|
||||
native_unit_of_measurement="strikes",
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="monthly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="weekly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
|
||||
translation_key="yearly_rain",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
suggested_display_precision=2,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
|
||||
@@ -39,11 +39,11 @@ from .const import (
|
||||
CONF_TURN_OFF_COMMAND,
|
||||
CONF_TURN_ON_COMMAND,
|
||||
DEFAULT_ADB_SERVER_PORT,
|
||||
DEFAULT_DEVICE_CLASS,
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS,
|
||||
DEFAULT_GET_SOURCES,
|
||||
DEFAULT_PORT,
|
||||
DEFAULT_SCREENCAP_INTERVAL,
|
||||
DEVICE_AUTO,
|
||||
DEVICE_CLASSES,
|
||||
DOMAIN,
|
||||
PROP_ETHMAC,
|
||||
@@ -89,8 +89,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST, default=host): str,
|
||||
vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
|
||||
DEVICE_CLASSES
|
||||
vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[
|
||||
SelectOptionDict(value=k, label=v)
|
||||
for k, v in DEVICE_CLASSES.items()
|
||||
],
|
||||
translation_key="device_class",
|
||||
)
|
||||
),
|
||||
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
},
|
||||
|
||||
@@ -15,15 +15,19 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
|
||||
CONF_TURN_ON_COMMAND = "turn_on_command"
|
||||
|
||||
DEFAULT_ADB_SERVER_PORT = 5037
|
||||
DEFAULT_DEVICE_CLASS = "auto"
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS = False
|
||||
DEFAULT_GET_SOURCES = True
|
||||
DEFAULT_PORT = 5555
|
||||
DEFAULT_SCREENCAP_INTERVAL = 5
|
||||
|
||||
DEVICE_AUTO = "auto"
|
||||
DEVICE_ANDROIDTV = "androidtv"
|
||||
DEVICE_FIRETV = "firetv"
|
||||
DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
|
||||
DEVICE_CLASSES = {
|
||||
DEVICE_AUTO: "auto",
|
||||
DEVICE_ANDROIDTV: "Android TV",
|
||||
DEVICE_FIRETV: "Fire TV",
|
||||
}
|
||||
|
||||
PROP_ETHMAC = "ethmac"
|
||||
PROP_SERIALNO = "serialno"
|
||||
|
||||
@@ -65,6 +65,13 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"device_class": {
|
||||
"options": {
|
||||
"auto": "Auto-detect device type"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"adb_command": {
|
||||
"description": "Sends an ADB command to an Android / Fire TV device.",
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "calculated",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
|
||||
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
"""The blueprint integration."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from . import websocket_api
|
||||
@@ -30,7 +28,4 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the blueprint integration."""
|
||||
websocket_api.async_setup(hass)
|
||||
hass.async_create_task(
|
||||
async_load_platform(hass, Platform.UPDATE, DOMAIN, None, config)
|
||||
)
|
||||
return True
|
||||
|
||||
@@ -204,8 +204,8 @@ class DomainBlueprints:
|
||||
self.hass = hass
|
||||
self.domain = domain
|
||||
self.logger = logger
|
||||
self.blueprint_in_use = blueprint_in_use
|
||||
self.reload_blueprint_consumers = reload_blueprint_consumers
|
||||
self._blueprint_in_use = blueprint_in_use
|
||||
self._reload_blueprint_consumers = reload_blueprint_consumers
|
||||
self._blueprints: dict[str, Blueprint | None] = {}
|
||||
self._load_lock = asyncio.Lock()
|
||||
self._blueprint_schema = blueprint_schema
|
||||
@@ -325,7 +325,7 @@ class DomainBlueprints:
|
||||
|
||||
async def async_remove_blueprint(self, blueprint_path: str) -> None:
|
||||
"""Remove a blueprint file."""
|
||||
if self.blueprint_in_use(self.hass, blueprint_path):
|
||||
if self._blueprint_in_use(self.hass, blueprint_path):
|
||||
raise BlueprintInUse(self.domain, blueprint_path)
|
||||
path = self.blueprint_folder / blueprint_path
|
||||
await self.hass.async_add_executor_job(path.unlink)
|
||||
@@ -362,7 +362,7 @@ class DomainBlueprints:
|
||||
self._blueprints[blueprint_path] = blueprint
|
||||
|
||||
if overrides_existing:
|
||||
await self.reload_blueprint_consumers(self.hass, blueprint_path)
|
||||
await self._reload_blueprint_consumers(self.hass, blueprint_path)
|
||||
|
||||
return overrides_existing
|
||||
|
||||
|
||||
@@ -1,293 +0,0 @@
|
||||
"""Update entities for blueprints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Any, Final
|
||||
|
||||
from homeassistant.components import automation, script
|
||||
from . import importer, models
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.const import CONF_SOURCE_URL
|
||||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import event as event_helper
|
||||
from homeassistant.helpers.entity import EntityCategory
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DOMAIN as BLUEPRINT_DOMAIN
|
||||
from .errors import BlueprintException
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_LATEST_VERSION_PLACEHOLDER: Final = "remote"
|
||||
DATA_UPDATE_MANAGER: Final = "update_manager"
|
||||
REFRESH_INTERVAL: Final = timedelta(days=1)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class BlueprintUsage:
|
||||
"""Details about a blueprint currently in use."""
|
||||
|
||||
domain: str
|
||||
path: str
|
||||
domain_blueprints: models.DomainBlueprints
|
||||
blueprint: models.Blueprint
|
||||
entities: list[str]
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the blueprint update platform."""
|
||||
data = hass.data.setdefault(BLUEPRINT_DOMAIN, {})
|
||||
|
||||
if (manager := data.get(DATA_UPDATE_MANAGER)) is None:
|
||||
manager = BlueprintUpdateManager(hass, async_add_entities)
|
||||
data[DATA_UPDATE_MANAGER] = manager
|
||||
await manager.async_start()
|
||||
return
|
||||
|
||||
manager.replace_add_entities(async_add_entities)
|
||||
await manager.async_recreate_entities()
|
||||
|
||||
|
||||
class BlueprintUpdateManager:
|
||||
"""Manage blueprint update entities based on blueprint usage."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, async_add_entities: AddEntitiesCallback
|
||||
) -> None:
|
||||
"""Initialize the manager."""
|
||||
self.hass = hass
|
||||
self._async_add_entities = async_add_entities
|
||||
self._entities: dict[tuple[str, str], BlueprintUpdateEntity] = {}
|
||||
self._lock = asyncio.Lock()
|
||||
self._refresh_cancel: CALLBACK_TYPE | None = None
|
||||
self._started = False
|
||||
self._interval_unsub: CALLBACK_TYPE | None = None
|
||||
|
||||
async def async_start(self) -> None:
|
||||
"""Start tracking blueprint usage."""
|
||||
if self._started:
|
||||
return
|
||||
self._started = True
|
||||
|
||||
self._interval_unsub = event_helper.async_track_time_interval(
|
||||
self.hass, self._handle_time_interval, REFRESH_INTERVAL
|
||||
)
|
||||
await self.async_refresh_entities()
|
||||
|
||||
def replace_add_entities(self, async_add_entities: AddEntitiesCallback) -> None:
|
||||
"""Update the callback used to register entities."""
|
||||
self._async_add_entities = async_add_entities
|
||||
|
||||
async def async_recreate_entities(self) -> None:
|
||||
"""Recreate entities after the platform has been reloaded."""
|
||||
async with self._lock:
|
||||
entities = list(self._entities.values())
|
||||
self._entities.clear()
|
||||
|
||||
for entity in entities:
|
||||
await entity.async_remove()
|
||||
|
||||
await self.async_refresh_entities()
|
||||
|
||||
async def async_refresh_entities(self) -> None:
|
||||
"""Refresh update entities based on current blueprint usage."""
|
||||
async with self._lock:
|
||||
usage_map = await self._async_collect_in_use_blueprints()
|
||||
|
||||
current_keys = set(self._entities)
|
||||
new_keys = set(usage_map)
|
||||
|
||||
for key in current_keys - new_keys:
|
||||
entity = self._entities.pop(key)
|
||||
await entity.async_remove()
|
||||
|
||||
new_entities: list[BlueprintUpdateEntity] = []
|
||||
|
||||
for key in new_keys - current_keys:
|
||||
usage = usage_map[key]
|
||||
entity = BlueprintUpdateEntity(self, usage)
|
||||
self._entities[key] = entity
|
||||
new_entities.append(entity)
|
||||
|
||||
for key in new_keys & current_keys:
|
||||
self._entities[key].update_usage(usage_map[key])
|
||||
self._entities[key].async_write_ha_state()
|
||||
|
||||
if new_entities:
|
||||
self._async_add_entities(new_entities)
|
||||
|
||||
def async_schedule_refresh(self) -> None:
|
||||
"""Schedule an asynchronous refresh."""
|
||||
if self._refresh_cancel is not None:
|
||||
return
|
||||
|
||||
self._refresh_cancel = event_helper.async_call_later(
|
||||
self.hass, 0, self._handle_scheduled_refresh
|
||||
)
|
||||
|
||||
@callback
|
||||
def _handle_scheduled_refresh(self, _now: Any) -> None:
|
||||
"""Run a scheduled refresh task."""
|
||||
self._refresh_cancel = None
|
||||
self.hass.async_create_task(self.async_refresh_entities())
|
||||
|
||||
@callback
|
||||
def _handle_time_interval(self, _now: Any) -> None:
|
||||
"""Handle scheduled interval refresh."""
|
||||
self.async_schedule_refresh()
|
||||
|
||||
async def _async_collect_in_use_blueprints(self) -> dict[tuple[str, str], BlueprintUsage]:
|
||||
"""Collect blueprint usage information for automations and scripts."""
|
||||
|
||||
usage_keys: set[tuple[str, str]] = set()
|
||||
|
||||
if automation.DATA_COMPONENT in self.hass.data:
|
||||
component = self.hass.data[automation.DATA_COMPONENT]
|
||||
for automation_entity in list(component.entities):
|
||||
if (path := getattr(automation_entity, "referenced_blueprint", None)):
|
||||
usage_keys.add((automation.DOMAIN, path))
|
||||
|
||||
if script.DOMAIN in self.hass.data:
|
||||
component = self.hass.data[script.DOMAIN]
|
||||
for script_entity in list(component.entities):
|
||||
if (path := getattr(script_entity, "referenced_blueprint", None)):
|
||||
usage_keys.add((script.DOMAIN, path))
|
||||
|
||||
domain_blueprints_map = self.hass.data.get(BLUEPRINT_DOMAIN, {})
|
||||
usage_map: dict[tuple[str, str], BlueprintUsage] = {}
|
||||
|
||||
for domain, path in usage_keys:
|
||||
domain_blueprints: models.DomainBlueprints | None = domain_blueprints_map.get(
|
||||
domain
|
||||
)
|
||||
|
||||
if domain_blueprints is None:
|
||||
continue
|
||||
|
||||
if not domain_blueprints.blueprint_in_use(self.hass, path):
|
||||
continue
|
||||
|
||||
try:
|
||||
blueprint = await domain_blueprints.async_get_blueprint(path)
|
||||
except BlueprintException:
|
||||
continue
|
||||
|
||||
source_url = blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
if not source_url:
|
||||
continue
|
||||
|
||||
if domain == automation.DOMAIN:
|
||||
entities = automation.automations_with_blueprint(self.hass, path)
|
||||
elif domain == script.DOMAIN:
|
||||
entities = script.scripts_with_blueprint(self.hass, path)
|
||||
else:
|
||||
entities = []
|
||||
|
||||
usage_map[(domain, path)] = BlueprintUsage(
|
||||
domain=domain,
|
||||
path=path,
|
||||
domain_blueprints=domain_blueprints,
|
||||
blueprint=blueprint,
|
||||
entities=entities,
|
||||
)
|
||||
|
||||
return usage_map
|
||||
|
||||
|
||||
class BlueprintUpdateEntity(UpdateEntity):
|
||||
"""Define a blueprint update entity."""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_has_entity_name = True
|
||||
_attr_should_poll = False
|
||||
_attr_supported_features = UpdateEntityFeature.INSTALL
|
||||
|
||||
def __init__(self, manager: BlueprintUpdateManager, usage: BlueprintUsage) -> None:
|
||||
"""Initialize the update entity."""
|
||||
self._manager = manager
|
||||
self._domain = usage.domain
|
||||
self._path = usage.path
|
||||
self._domain_blueprints = usage.domain_blueprints
|
||||
self._blueprint = usage.blueprint
|
||||
self._entities_in_use = usage.entities
|
||||
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
self._attr_unique_id = f"{self._domain}:{self._path}"
|
||||
self._attr_in_progress = False
|
||||
|
||||
self.update_usage(usage)
|
||||
|
||||
@callback
|
||||
def update_usage(self, usage: BlueprintUsage) -> None:
|
||||
"""Update the entity with latest usage information."""
|
||||
self._domain_blueprints = usage.domain_blueprints
|
||||
self._blueprint = usage.blueprint
|
||||
self._entities_in_use = usage.entities
|
||||
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
|
||||
|
||||
self._attr_name = usage.blueprint.name
|
||||
self._attr_release_summary = usage.blueprint.metadata.get("description")
|
||||
self._attr_installed_version = usage.blueprint.metadata.get("version")
|
||||
self._attr_release_url = self._source_url
|
||||
self._attr_available = self._source_url is not None
|
||||
self._attr_latest_version = (
|
||||
_LATEST_VERSION_PLACEHOLDER
|
||||
if self._source_url is not None
|
||||
else self._attr_installed_version
|
||||
)
|
||||
|
||||
async def async_install(self, version: str | None, backup: bool) -> None:
|
||||
"""Install (refresh) the blueprint from its source."""
|
||||
if self._source_url is None:
|
||||
raise HomeAssistantError("Blueprint does not define a source URL")
|
||||
|
||||
self._attr_in_progress = True
|
||||
self.async_write_ha_state()
|
||||
usage: BlueprintUsage | None = None
|
||||
|
||||
try:
|
||||
imported = await importer.fetch_blueprint_from_url(
|
||||
self.hass, self._source_url
|
||||
)
|
||||
blueprint = imported.blueprint
|
||||
|
||||
if blueprint.domain != self._domain:
|
||||
raise HomeAssistantError(
|
||||
"Downloaded blueprint domain does not match the existing blueprint"
|
||||
)
|
||||
|
||||
await self._domain_blueprints.async_add_blueprint(
|
||||
blueprint, self._path, allow_override=True
|
||||
)
|
||||
|
||||
usage = BlueprintUsage(
|
||||
domain=self._domain,
|
||||
path=self._path,
|
||||
domain_blueprints=self._domain_blueprints,
|
||||
blueprint=blueprint,
|
||||
entities=self._entities_in_use,
|
||||
)
|
||||
|
||||
except HomeAssistantError:
|
||||
raise
|
||||
except Exception as err: # noqa: BLE001 - Provide context for unexpected errors
|
||||
raise HomeAssistantError("Failed to update blueprint from source") from err
|
||||
finally:
|
||||
self._attr_in_progress = False
|
||||
|
||||
if usage is not None:
|
||||
self.update_usage(usage)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._manager.async_schedule_refresh()
|
||||
@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
|
||||
from homeassistant.components.snmp import async_get_snmp_engine
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
|
||||
from .const import (
|
||||
CONF_COMMUNITY,
|
||||
@@ -50,6 +50,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
|
||||
coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
if brother.serial.lower() != entry.unique_id:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="serial_mismatch",
|
||||
translation_placeholders={
|
||||
"device": entry.title,
|
||||
},
|
||||
)
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -13,6 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.util.network import is_host_valid
|
||||
|
||||
@@ -21,6 +22,7 @@ from .const import (
|
||||
DEFAULT_COMMUNITY,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
PRINTER_TYPE_LASER,
|
||||
PRINTER_TYPES,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
@@ -28,7 +30,12 @@ from .const import (
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
@@ -42,7 +49,12 @@ DATA_SCHEMA = vol.Schema(
|
||||
)
|
||||
ZEROCONF_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
|
||||
@@ -7,7 +7,10 @@ from typing import Final
|
||||
|
||||
DOMAIN: Final = "brother"
|
||||
|
||||
PRINTER_TYPES: Final = ["laser", "ink"]
|
||||
PRINTER_TYPE_LASER = "laser"
|
||||
PRINTER_TYPE_INK = "ink"
|
||||
|
||||
PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
|
||||
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
30
homeassistant/components/brother/entity.py
Normal file
30
homeassistant/components/brother/entity.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Define the Brother entity."""
|
||||
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BrotherDataUpdateCoordinator
|
||||
|
||||
|
||||
class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
|
||||
"""Define a Brother Printer entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BrotherDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{coordinator.brother.host}/",
|
||||
identifiers={(DOMAIN, coordinator.brother.serial)},
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
@@ -19,13 +19,15 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
|
||||
from .entity import BrotherPrinterEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
ATTR_COUNTER = "counter"
|
||||
ATTR_REMAINING_PAGES = "remaining_pages"
|
||||
@@ -330,12 +332,9 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class BrotherPrinterSensor(
|
||||
CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
|
||||
):
|
||||
"""Define an Brother Printer sensor."""
|
||||
class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
|
||||
"""Define a Brother Printer sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: BrotherSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
@@ -345,16 +344,7 @@ class BrotherPrinterSensor(
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{coordinator.brother.host}/",
|
||||
identifiers={(DOMAIN, coordinator.brother.serial)},
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
|
||||
self._attr_native_value = description.value(coordinator.data)
|
||||
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
@@ -38,11 +38,11 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"type": "Type of the printer"
|
||||
"type": "Printer type"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of the Brother printer to control.",
|
||||
"type": "Brother printer type: ink or laser."
|
||||
"type": "The type of the Brother printer."
|
||||
},
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
@@ -207,8 +207,19 @@
|
||||
"cannot_connect": {
|
||||
"message": "An error occurred while connecting to the {device} printer: {error}"
|
||||
},
|
||||
"serial_mismatch": {
|
||||
"message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
|
||||
},
|
||||
"update_error": {
|
||||
"message": "An error occurred while retrieving data from the {device} printer: {error}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"printer_type": {
|
||||
"options": {
|
||||
"ink": "ink",
|
||||
"laser": "laser"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
|
||||
return HVACAction.HEATING
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_parse_hvac_mode",
|
||||
translation_key="failed_to_parse_hvac_action",
|
||||
translation_placeholders={
|
||||
"mode_and_active": mode_and_active,
|
||||
"current_temperature": str(self.current_temperature),
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
"exceptions": {
|
||||
"failed_to_parse_hvac_action": {
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
|
||||
},
|
||||
"failed_to_parse_hvac_mode": {
|
||||
"message": "Cannot parse response to HVACMode: {mode}"
|
||||
|
||||
@@ -71,8 +71,11 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
|
||||
services = await account_link.async_fetch_available_services(
|
||||
hass.data[DATA_CLOUD]
|
||||
)
|
||||
except (aiohttp.ClientError, TimeoutError):
|
||||
return []
|
||||
except (aiohttp.ClientError, TimeoutError) as err:
|
||||
raise config_entry_oauth2_flow.ImplementationUnavailableError(
|
||||
"Cannot provide OAuth2 implementation for cloud services. "
|
||||
"Failed to fetch from account link server."
|
||||
) from err
|
||||
|
||||
hass.data[DATA_SERVICES] = services
|
||||
|
||||
|
||||
@@ -6,3 +6,5 @@ DEFAULT_PORT = 10102
|
||||
|
||||
CONF_SUPPORTED_MODES = "supported_modes"
|
||||
CONF_SWING_SUPPORT = "swing_support"
|
||||
MAX_RETRIES = 3
|
||||
BACKOFF_BASE_DELAY = 2
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from pycoolmasternet_async import CoolMasterNet
|
||||
@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,7 +47,34 @@ class CoolmasterDataUpdateCoordinator(
|
||||
|
||||
async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
|
||||
"""Fetch data from Coolmaster."""
|
||||
try:
|
||||
return await self._coolmaster.status()
|
||||
except OSError as error:
|
||||
raise UpdateFailed from error
|
||||
retries_left = MAX_RETRIES
|
||||
status: dict[str, CoolMasterNetUnit] = {}
|
||||
while retries_left > 0 and not status:
|
||||
retries_left -= 1
|
||||
try:
|
||||
status = await self._coolmaster.status()
|
||||
except OSError as error:
|
||||
if retries_left == 0:
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
|
||||
) from error
|
||||
_LOGGER.debug(
|
||||
"Error communicating with coolmaster (%d retries left): %s",
|
||||
retries_left,
|
||||
str(error),
|
||||
)
|
||||
else:
|
||||
if status:
|
||||
return status
|
||||
|
||||
_LOGGER.debug(
|
||||
"Error communicating with coolmaster: empty status received (%d retries left)",
|
||||
retries_left,
|
||||
)
|
||||
|
||||
backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
|
||||
await asyncio.sleep(backoff)
|
||||
|
||||
raise UpdateFailed(
|
||||
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
|
||||
)
|
||||
|
||||
@@ -81,6 +81,9 @@
|
||||
"active_map": {
|
||||
"default": "mdi:floor-plan"
|
||||
},
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"water_amount": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
@@ -89,9 +92,6 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
|
||||
}
|
||||
|
||||
@@ -5,8 +5,9 @@ from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
|
||||
from deebot_client.command import CommandWithMessageHandling
|
||||
from deebot_client.device import Device
|
||||
from deebot_client.events import WorkModeEvent
|
||||
from deebot_client.events import WorkModeEvent, auto_empty
|
||||
from deebot_client.events.base import Event
|
||||
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
|
||||
from deebot_client.events.water_info import WaterAmountEvent
|
||||
@@ -34,6 +35,9 @@ class EcovacsSelectEntityDescription[EventT: Event](
|
||||
|
||||
current_option_fn: Callable[[EventT], str | None]
|
||||
options_fn: Callable[[CapabilitySetTypes], list[str]]
|
||||
set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
|
||||
lambda cap, option: cap.set(option)
|
||||
)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
@@ -58,6 +62,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
|
||||
set_option_fn=lambda cap, option: cap.set(None, option),
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -106,14 +118,17 @@ class EcovacsSelectEntity[EventT: Event](
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def on_event(event: EventT) -> None:
|
||||
self._attr_current_option = self.entity_description.current_option_fn(event)
|
||||
self.async_write_ha_state()
|
||||
if (option := self.entity_description.current_option_fn(event)) is not None:
|
||||
self._attr_current_option = option
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._subscribe(self._capability.event, on_event)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self._device.execute_command(self._capability.set(option))
|
||||
await self._device.execute_command(
|
||||
self.entity_description.set_option_fn(self._capability, option)
|
||||
)
|
||||
|
||||
|
||||
class EcovacsActiveMapSelectEntity(
|
||||
|
||||
@@ -17,7 +17,6 @@ from deebot_client.events import (
|
||||
NetworkInfoEvent,
|
||||
StatsEvent,
|
||||
TotalStatsEvent,
|
||||
auto_empty,
|
||||
station,
|
||||
)
|
||||
from sucks import VacBot
|
||||
@@ -159,14 +158,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(station.State),
|
||||
),
|
||||
EcovacsSensorEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
value_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(auto_empty.Frequency),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -129,6 +129,16 @@
|
||||
"active_map": {
|
||||
"name": "Active map"
|
||||
},
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"min_10": "10 minutes",
|
||||
"min_15": "15 minutes",
|
||||
"min_25": "25 minutes",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
@@ -149,13 +159,6 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name": "Error",
|
||||
"state_attributes": {
|
||||
|
||||
@@ -151,14 +151,12 @@ ECOWITT_SENSORS_MAPPING: Final = {
|
||||
key="RAIN_COUNT_MM",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
|
||||
key="RAIN_COUNT_INCHES",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
|
||||
|
||||
@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return await self.async_step_discovered_connection()
|
||||
return await self.async_step_manual_connection()
|
||||
|
||||
current_unique_ids = self._async_current_ids()
|
||||
current_unique_ids = self._async_current_ids(include_ignore=False)
|
||||
current_hosts = {
|
||||
hostname_from_url(entry.data[CONF_HOST])
|
||||
for entry in self._async_current_entries(include_ignore=False)
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
"""Virtual integration: Enmax Energy."""
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"domain": "enmax",
|
||||
"name": "Enmax Energy",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "opower"
|
||||
}
|
||||
@@ -75,10 +75,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:
|
||||
"""Unload an esphome config entry."""
|
||||
entry_data = await cleanup_instance(entry)
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
entry, entry_data.loaded_platforms
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
entry, entry.runtime_data.loaded_platforms
|
||||
)
|
||||
if unload_ok:
|
||||
await cleanup_instance(entry)
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.5.0",
|
||||
"aioesphomeapi==42.6.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -77,7 +77,7 @@ class EufyLifeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data={CONF_MODEL: model},
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if (
|
||||
|
||||
@@ -129,6 +129,51 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconf_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input:
|
||||
try:
|
||||
await _validate_input(
|
||||
self.hass,
|
||||
data={
|
||||
**reconf_entry.data,
|
||||
**user_input,
|
||||
},
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except FireflyClientTimeout:
|
||||
errors["base"] = "timeout_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
|
||||
return self.async_update_reload_and_abort(
|
||||
reconf_entry,
|
||||
data_updates={
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
suggested_values=user_input or reconf_entry.data.copy(),
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -20,6 +21,20 @@
|
||||
},
|
||||
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::firefly_iii::config::step::user::data_description::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:component::firefly_iii::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Firefly III instance.",
|
||||
"title": "Reconfigure Firefly III Integration"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/foscam",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["libpyfoscamcgi"],
|
||||
"requirements": ["libpyfoscamcgi==0.0.8"]
|
||||
"requirements": ["libpyfoscamcgi==0.0.9"]
|
||||
}
|
||||
|
||||
@@ -453,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.http.app.router.register_resource(IndexView(repo_path, hass))
|
||||
|
||||
async_register_built_in_panel(hass, "light")
|
||||
async_register_built_in_panel(hass, "safety")
|
||||
async_register_built_in_panel(hass, "security")
|
||||
async_register_built_in_panel(hass, "climate")
|
||||
|
||||
async_register_built_in_panel(hass, "profile")
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251029.1"]
|
||||
"requirements": ["home-assistant-frontend==20251105.0"]
|
||||
}
|
||||
|
||||
@@ -43,6 +43,9 @@ from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class GiosSensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
@@ -14,6 +14,10 @@
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"station_id": "Measuring station"
|
||||
},
|
||||
"data_description": {
|
||||
"name": "Config entry name, by default, this is the name of your Home Assistant instance.",
|
||||
"station_id": "The name of the measuring station where the environmental data is collected."
|
||||
},
|
||||
"title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import itertools
|
||||
|
||||
from aiohasupervisor.models.mounts import MountState
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -13,8 +16,14 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
|
||||
from .entity import HassioAddonEntity
|
||||
from .const import (
|
||||
ADDONS_COORDINATOR,
|
||||
ATTR_STARTED,
|
||||
ATTR_STATE,
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_MOUNTS,
|
||||
)
|
||||
from .entity import HassioAddonEntity, HassioMountEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -34,6 +43,16 @@ ADDON_ENTITY_DESCRIPTIONS = (
|
||||
),
|
||||
)
|
||||
|
||||
MOUNT_ENTITY_DESCRIPTIONS = (
|
||||
HassioBinarySensorEntityDescription(
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_registry_enabled_default=False,
|
||||
key=ATTR_STATE,
|
||||
translation_key="mount",
|
||||
target=MountState.ACTIVE.value,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -44,13 +63,26 @@ async def async_setup_entry(
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
itertools.chain(
|
||||
[
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
[
|
||||
HassioMountBinarySensor(
|
||||
mount=mount,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for mount in coordinator.data[DATA_KEY_MOUNTS].values()
|
||||
for entity_description in MOUNT_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -68,3 +100,20 @@ class HassioAddonBinarySensor(HassioAddonEntity, BinarySensorEntity):
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
|
||||
class HassioMountBinarySensor(HassioMountEntity, BinarySensorEntity):
|
||||
"""Binary sensor for Hass.io mount."""
|
||||
|
||||
entity_description: HassioBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
value = getattr(
|
||||
self.coordinator.data[DATA_KEY_MOUNTS][self._mount.name],
|
||||
self.entity_description.key,
|
||||
)
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
@@ -90,6 +90,7 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
|
||||
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
|
||||
DATA_ADDONS_INFO = "hassio_addons_info"
|
||||
DATA_ADDONS_STATS = "hassio_addons_stats"
|
||||
DATA_MOUNTS_INFO = "hassio_mounts_info"
|
||||
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
|
||||
|
||||
ATTR_AUTO_UPDATE = "auto_update"
|
||||
@@ -110,6 +111,7 @@ DATA_KEY_SUPERVISOR = "supervisor"
|
||||
DATA_KEY_CORE = "core"
|
||||
DATA_KEY_HOST = "host"
|
||||
DATA_KEY_SUPERVISOR_ISSUES = "supervisor_issues"
|
||||
DATA_KEY_MOUNTS = "mounts"
|
||||
|
||||
PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
|
||||
@@ -174,3 +176,4 @@ class SupervisorEntityModel(StrEnum):
|
||||
CORE = "Home Assistant Core"
|
||||
SUPERVISOR = "Home Assistant Supervisor"
|
||||
HOST = "Home Assistant Host"
|
||||
MOUNT = "Home Assistant Mount"
|
||||
|
||||
@@ -10,6 +10,11 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
|
||||
from aiohasupervisor.models import StoreInfo
|
||||
from aiohasupervisor.models.mounts import (
|
||||
CIFSMountResponse,
|
||||
MountsInfo,
|
||||
NFSMountResponse,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
|
||||
@@ -41,9 +46,11 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DATA_KEY_SUPERVISOR_ISSUES,
|
||||
DATA_MOUNTS_INFO,
|
||||
DATA_NETWORK_INFO,
|
||||
DATA_OS_INFO,
|
||||
DATA_STORE,
|
||||
@@ -174,6 +181,16 @@ def get_core_info(hass: HomeAssistant) -> dict[str, Any] | None:
|
||||
return hass.data.get(DATA_CORE_INFO)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_mounts_info(hass: HomeAssistant) -> MountsInfo | None:
|
||||
"""Return Home Assistant mounts information from Supervisor.
|
||||
|
||||
Async friendly.
|
||||
"""
|
||||
return hass.data.get(DATA_MOUNTS_INFO)
|
||||
|
||||
|
||||
@callback
|
||||
@bind_hass
|
||||
def get_issues_info(hass: HomeAssistant) -> SupervisorIssues | None:
|
||||
@@ -203,6 +220,25 @@ def async_register_addons_in_dev_reg(
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_mounts_in_dev_reg(
|
||||
entry_id: str,
|
||||
dev_reg: dr.DeviceRegistry,
|
||||
mounts: list[CIFSMountResponse | NFSMountResponse],
|
||||
) -> None:
|
||||
"""Register mounts in the device registry."""
|
||||
for mount in mounts:
|
||||
params = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")},
|
||||
manufacturer="Home Assistant",
|
||||
model=SupervisorEntityModel.MOUNT,
|
||||
model_id=f"{mount.usage}/{mount.type}",
|
||||
name=mount.name,
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_os_in_dev_reg(
|
||||
entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
|
||||
@@ -272,12 +308,12 @@ def async_register_supervisor_in_dev_reg(
|
||||
|
||||
|
||||
@callback
|
||||
def async_remove_addons_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, addons: set[str]
|
||||
def async_remove_devices_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, devices: set[str]
|
||||
) -> None:
|
||||
"""Remove addons from the device registry."""
|
||||
for addon_slug in addons:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, addon_slug)}):
|
||||
"""Remove devices from the device registry."""
|
||||
for device in devices:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, device)}):
|
||||
dev_reg.async_remove_device(dev.id)
|
||||
|
||||
|
||||
@@ -362,12 +398,19 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
**get_supervisor_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
|
||||
new_data[DATA_KEY_MOUNTS] = {
|
||||
mount.name: mount
|
||||
for mount in getattr(get_mounts_info(self.hass), "mounts", [])
|
||||
}
|
||||
|
||||
# If this is the initial refresh, register all addons and return the dict
|
||||
if is_first_update:
|
||||
async_register_addons_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
async_register_mounts_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
|
||||
)
|
||||
async_register_core_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
|
||||
)
|
||||
@@ -389,7 +432,20 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
if device.model == SupervisorEntityModel.ADDON
|
||||
}
|
||||
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
|
||||
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
|
||||
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
|
||||
|
||||
# Remove mounts that no longer exists from device registry
|
||||
supervisor_mount_devices = {
|
||||
device.name
|
||||
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
|
||||
self.entry_id
|
||||
)
|
||||
if device.model == SupervisorEntityModel.MOUNT
|
||||
}
|
||||
if stale_mounts := supervisor_mount_devices - set(new_data[DATA_KEY_MOUNTS]):
|
||||
async_remove_devices_from_dev_reg(
|
||||
self.dev_reg, {f"mount_{stale_mount}" for stale_mount in stale_mounts}
|
||||
)
|
||||
|
||||
if not self.is_hass_os and (
|
||||
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
|
||||
@@ -397,11 +453,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
# Remove the OS device if it exists and the installation is not hassos
|
||||
self.dev_reg.async_remove_device(dev.id)
|
||||
|
||||
# If there are new add-ons, we should reload the config entry so we can
|
||||
# If there are new add-ons or mounts, we should reload the config entry so we can
|
||||
# create new devices and entities. We can return an empty dict because
|
||||
# coordinator will be recreated.
|
||||
if self.data and set(new_data[DATA_KEY_ADDONS]) - set(
|
||||
self.data[DATA_KEY_ADDONS]
|
||||
if self.data and (
|
||||
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
|
||||
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(self.entry_id)
|
||||
@@ -428,6 +485,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
DATA_CORE_INFO: hassio.get_core_info(),
|
||||
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
|
||||
DATA_OS_INFO: hassio.get_os_info(),
|
||||
DATA_MOUNTS_INFO: self.supervisor_client.mounts.info(),
|
||||
}
|
||||
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
|
||||
updates[DATA_CORE_STATS] = hassio.get_core_stats()
|
||||
|
||||
@@ -4,6 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@@ -15,6 +17,7 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DOMAIN,
|
||||
@@ -192,3 +195,34 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
)
|
||||
if CONTAINER_STATS in update_types:
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
|
||||
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
"""Base Entity for Mount."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HassioDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
mount: CIFSMountResponse | NFSMountResponse,
|
||||
) -> None:
|
||||
"""Initialize base entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = (
|
||||
f"home_assistant_mount_{mount.name}_{entity_description.key}"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")}
|
||||
)
|
||||
self._mount = mount
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self._mount.name in self.coordinator.data[DATA_KEY_MOUNTS]
|
||||
)
|
||||
|
||||
@@ -44,7 +44,6 @@ from .const import (
|
||||
EVENT_SUPPORTED_CHANGED,
|
||||
EXTRA_PLACEHOLDERS,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
@@ -87,7 +86,6 @@ ISSUE_KEYS_FOR_REPAIRS = {
|
||||
"issue_system_disk_lifetime",
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"mount": {
|
||||
"name": "Connected"
|
||||
},
|
||||
"state": {
|
||||
"name": "Running"
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.83", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.84", "babel==2.15.0"]
|
||||
}
|
||||
|
||||
@@ -75,6 +75,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
|
||||
context: ConfigFlowContext
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -112,7 +113,6 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Initialize the config flow."""
|
||||
|
||||
@@ -38,6 +38,7 @@ from homeassistant.const import (
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir, storage
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.http import (
|
||||
KEY_ALLOW_CONFIGURED_CORS,
|
||||
KEY_AUTHENTICATED, # noqa: F401
|
||||
@@ -109,7 +110,7 @@ HTTP_SCHEMA: Final = vol.All(
|
||||
cv.deprecated(CONF_BASE_URL),
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_SERVER_HOST, default=_DEFAULT_BIND): vol.All(
|
||||
vol.Optional(CONF_SERVER_HOST): vol.All(
|
||||
cv.ensure_list, vol.Length(min=1), [cv.string]
|
||||
),
|
||||
vol.Optional(CONF_SERVER_PORT, default=SERVER_PORT): cv.port,
|
||||
@@ -207,7 +208,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if conf is None:
|
||||
conf = cast(ConfData, HTTP_SCHEMA({}))
|
||||
|
||||
server_host = conf[CONF_SERVER_HOST]
|
||||
if CONF_SERVER_HOST in conf and is_hassio(hass):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"server_host_may_break_hassio",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key="server_host_may_break_hassio",
|
||||
)
|
||||
|
||||
server_host = conf.get(CONF_SERVER_HOST, _DEFAULT_BIND)
|
||||
server_port = conf[CONF_SERVER_PORT]
|
||||
ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
|
||||
ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
{
|
||||
"issues": {
|
||||
"server_host_may_break_hassio": {
|
||||
"description": "The `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed in a future release.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
|
||||
"title": "The `server_host` HTTP configuration may break Home Assistant Core - Supervisor communication"
|
||||
},
|
||||
"ssl_configured_without_configured_urls": {
|
||||
"description": "Home Assistant detected that SSL has been set up on your instance, however, no custom external internet URL has been set.\n\nThis may result in unexpected behavior. Text-to-speech may fail, and integrations may not be able to connect back to your instance correctly.\n\nTo address this issue, go to Settings > System > Network; under the \"Home Assistant URL\" section, configure your new \"Internet\" and \"Local network\" addresses that match your new SSL configuration.",
|
||||
"title": "SSL is configured without an external URL or internal URL"
|
||||
|
||||
@@ -20,6 +20,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"rf_message_rssi": {
|
||||
"default": "mdi:signal"
|
||||
}
|
||||
},
|
||||
"water_heater": {
|
||||
"boiler": {
|
||||
"state": {
|
||||
|
||||
@@ -61,6 +61,16 @@ SENSOR_TYPES: tuple[IncomfortSensorEntityDescription, ...] = (
|
||||
value_key="tap_temp",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
# A lower RSSI value is better
|
||||
# A typical RSSI value is 28 for connection just in range
|
||||
IncomfortSensorEntityDescription(
|
||||
key="rf_message_rssi",
|
||||
translation_key="rf_message_rssi",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_key="rf_message_rssi",
|
||||
extra_key="rfstatus_cntr",
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -76,6 +76,9 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"rf_message_rssi": {
|
||||
"name": "RSSI"
|
||||
},
|
||||
"tap_temperature": {
|
||||
"name": "Tap temperature"
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class KegtronConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -85,7 +85,7 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovered_adv:
|
||||
self._discovered_advs[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass):
|
||||
self._ble_device = discovery_info.device
|
||||
address = discovery_info.address
|
||||
|
||||
@@ -299,8 +299,8 @@ def _create_climate_ui(xknx: XKNX, conf: ConfigExtractor, name: str) -> XknxClim
|
||||
group_address_active_state=conf.get_state_and_passive(CONF_GA_ACTIVE),
|
||||
group_address_command_value_state=conf.get_state_and_passive(CONF_GA_VALVE),
|
||||
sync_state=sync_state,
|
||||
min_temp=conf.get(ClimateConf.MIN_TEMP),
|
||||
max_temp=conf.get(ClimateConf.MAX_TEMP),
|
||||
min_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MIN_TEMP),
|
||||
max_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MAX_TEMP),
|
||||
mode=climate_mode,
|
||||
group_address_fan_speed=conf.get_write(CONF_GA_FAN_SPEED),
|
||||
group_address_fan_speed_state=conf.get_state_and_passive(CONF_GA_FAN_SPEED),
|
||||
@@ -486,7 +486,7 @@ class _KnxClimate(ClimateEntity, _KnxEntityBase):
|
||||
ha_controller_modes.append(self._last_hvac_mode)
|
||||
ha_controller_modes.append(HVACMode.OFF)
|
||||
|
||||
hvac_modes = list(set(filter(None, ha_controller_modes)))
|
||||
hvac_modes = sorted(set(filter(None, ha_controller_modes)))
|
||||
return (
|
||||
hvac_modes
|
||||
if hvac_modes
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"requirements": [
|
||||
"xknx==3.10.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2025.10.26.81530"
|
||||
"knx-frontend==2025.10.31.195356"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -106,7 +106,7 @@ class KulerskyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -79,7 +79,7 @@ class Ld2410BleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -35,7 +35,7 @@ class LeaoneConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -85,7 +85,7 @@ class LedBleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if discovery := self._discovery_info:
|
||||
self._discovered_devices[discovery.address] = discovery
|
||||
else:
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery in async_discovered_service_info(self.hass):
|
||||
if (
|
||||
discovery.address in current_addresses
|
||||
|
||||
@@ -622,6 +622,7 @@ ENERGY_USAGE_SENSORS: tuple[ThinQEnergySensorEntityDescription, ...] = (
|
||||
usage_period=USAGE_MONTHLY,
|
||||
start_date_fn=lambda today: today,
|
||||
end_date_fn=lambda today: today,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
ThinQEnergySensorEntityDescription(
|
||||
key="last_month",
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pylitterbot"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pylitterbot==2024.2.7"]
|
||||
"requirements": ["pylitterbot==2025.0.0"]
|
||||
}
|
||||
|
||||
@@ -527,4 +527,57 @@ DISCOVERY_SCHEMAS = [
|
||||
vendor_id=(4447,),
|
||||
product_id=(8194,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SELECT,
|
||||
entity_description=MatterSelectEntityDescription(
|
||||
key="AqaraOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="sensitivity_level",
|
||||
options=["low", "standard", "high"],
|
||||
device_to_ha={
|
||||
0: "low",
|
||||
1: "standard",
|
||||
2: "high",
|
||||
}.get,
|
||||
ha_to_device={
|
||||
"low": 0,
|
||||
"standard": 1,
|
||||
"high": 2,
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterAttributeSelectEntity,
|
||||
required_attributes=(
|
||||
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
|
||||
),
|
||||
vendor_id=(4447,),
|
||||
product_id=(
|
||||
8197,
|
||||
8195,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SELECT,
|
||||
entity_description=MatterSelectEntityDescription(
|
||||
key="HeimanOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="sensitivity_level",
|
||||
options=["low", "standard", "high"],
|
||||
device_to_ha={
|
||||
0: "low",
|
||||
1: "standard",
|
||||
2: "high",
|
||||
}.get,
|
||||
ha_to_device={
|
||||
"low": 0,
|
||||
"standard": 1,
|
||||
"high": 2,
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterAttributeSelectEntity,
|
||||
required_attributes=(
|
||||
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
|
||||
),
|
||||
vendor_id=(4619,),
|
||||
product_id=(4097,),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -93,7 +93,7 @@ class InspectorBLEConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._discovery_info = self._discovered_devices[address]
|
||||
return await self.async_step_check_connection()
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -49,6 +49,15 @@ ATA_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
|
||||
value_fn=lambda x: x.device.total_energy_consumed,
|
||||
enabled=lambda x: x.device.has_energy_consumed_meter,
|
||||
),
|
||||
MelcloudSensorEntityDescription(
|
||||
key="outside_temperature",
|
||||
translation_key="outside_temperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
value_fn=lambda x: x.device.outdoor_temperature,
|
||||
enabled=lambda x: x.device.has_outdoor_temperature,
|
||||
),
|
||||
)
|
||||
ATW_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
|
||||
MelcloudSensorEntityDescription(
|
||||
|
||||
@@ -75,7 +75,7 @@ class MelnorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
return self._create_entry(address)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(
|
||||
self.hass, connectable=True
|
||||
):
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pymiele"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pymiele==0.5.6"],
|
||||
"requirements": ["pymiele==0.6.0"],
|
||||
"single_config_entry": true,
|
||||
"zeroconf": ["_mieleathome._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -72,7 +72,7 @@ class MoatConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
title=self._discovered_devices[address], data={}
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or address in self._discovered_devices:
|
||||
|
||||
@@ -66,6 +66,8 @@ from .const import (
|
||||
CONF_BYTESIZE,
|
||||
CONF_CLIMATES,
|
||||
CONF_COLOR_TEMP_REGISTER,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_DATA_TYPE,
|
||||
CONF_DEVICE_ADDRESS,
|
||||
CONF_FAN_MODE_AUTO,
|
||||
@@ -137,6 +139,8 @@ from .const import (
|
||||
CONF_SWING_MODE_SWING_VERT,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS,
|
||||
CONF_VERIFY,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
@@ -159,8 +163,10 @@ from .modbus import DATA_MODBUS_HUBS, ModbusHub, async_modbus_setup
|
||||
from .validators import (
|
||||
duplicate_fan_mode_validator,
|
||||
duplicate_swing_mode_validator,
|
||||
ensure_and_check_conflicting_scales_and_offsets,
|
||||
hvac_fixedsize_reglist_validator,
|
||||
nan_validator,
|
||||
not_zero_value,
|
||||
register_int_list_validator,
|
||||
struct_validator,
|
||||
)
|
||||
@@ -210,8 +216,10 @@ BASE_STRUCT_SCHEMA = BASE_COMPONENT_SCHEMA.extend(
|
||||
]
|
||||
),
|
||||
vol.Optional(CONF_STRUCTURE): cv.string,
|
||||
vol.Optional(CONF_SCALE, default=1): vol.Coerce(float),
|
||||
vol.Optional(CONF_OFFSET, default=0): vol.Coerce(float),
|
||||
vol.Optional(CONF_SCALE): vol.All(
|
||||
vol.Coerce(float), lambda v: not_zero_value(v, "Scale cannot be zero.")
|
||||
),
|
||||
vol.Optional(CONF_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(CONF_PRECISION): cv.positive_int,
|
||||
vol.Optional(
|
||||
CONF_SWAP,
|
||||
@@ -273,6 +281,18 @@ CLIMATE_SCHEMA = vol.All(
|
||||
vol.Optional(CONF_TEMPERATURE_UNIT, default=DEFAULT_TEMP_UNIT): cv.string,
|
||||
vol.Exclusive(CONF_HVAC_ONOFF_COIL, "hvac_onoff_type"): cv.positive_int,
|
||||
vol.Exclusive(CONF_HVAC_ONOFF_REGISTER, "hvac_onoff_type"): cv.positive_int,
|
||||
vol.Optional(CONF_CURRENT_TEMP_SCALE): vol.All(
|
||||
vol.Coerce(float),
|
||||
lambda v: not_zero_value(
|
||||
v, "Current temperature scale cannot be zero."
|
||||
),
|
||||
),
|
||||
vol.Optional(CONF_TARGET_TEMP_SCALE): vol.All(
|
||||
vol.Coerce(float),
|
||||
lambda v: not_zero_value(v, "Target temperature scale cannot be zero."),
|
||||
),
|
||||
vol.Optional(CONF_CURRENT_TEMP_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(CONF_TARGET_TEMP_OFFSET): vol.Coerce(float),
|
||||
vol.Optional(
|
||||
CONF_HVAC_ON_VALUE, default=DEFAULT_HVAC_ON_VALUE
|
||||
): cv.positive_int,
|
||||
@@ -385,6 +405,7 @@ CLIMATE_SCHEMA = vol.All(
|
||||
),
|
||||
},
|
||||
),
|
||||
ensure_and_check_conflicting_scales_and_offsets,
|
||||
)
|
||||
|
||||
COVERS_SCHEMA = BASE_COMPONENT_SCHEMA.extend(
|
||||
|
||||
@@ -50,6 +50,8 @@ from .const import (
|
||||
CALL_TYPE_WRITE_REGISTER,
|
||||
CALL_TYPE_WRITE_REGISTERS,
|
||||
CONF_CLIMATES,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_FAN_MODE_AUTO,
|
||||
CONF_FAN_MODE_DIFFUSE,
|
||||
CONF_FAN_MODE_FOCUS,
|
||||
@@ -97,8 +99,12 @@ from .const import (
|
||||
CONF_SWING_MODE_SWING_VERT,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS,
|
||||
CONF_WRITE_REGISTERS,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
DataType,
|
||||
)
|
||||
from .entity import ModbusStructEntity
|
||||
@@ -166,6 +172,10 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._attr_min_temp = config[CONF_MIN_TEMP]
|
||||
self._attr_max_temp = config[CONF_MAX_TEMP]
|
||||
self._attr_target_temperature_step = config[CONF_STEP]
|
||||
self._current_temp_scale = config[CONF_CURRENT_TEMP_SCALE]
|
||||
self._current_temp_offset = config[CONF_CURRENT_TEMP_OFFSET]
|
||||
self._target_temp_scale = config[CONF_TARGET_TEMP_SCALE]
|
||||
self._target_temp_offset = config[CONF_TARGET_TEMP_OFFSET]
|
||||
|
||||
if CONF_HVAC_MODE_REGISTER in config:
|
||||
mode_config = config[CONF_HVAC_MODE_REGISTER]
|
||||
@@ -413,8 +423,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
target_temperature = (
|
||||
float(kwargs[ATTR_TEMPERATURE]) - self._offset
|
||||
) / self._scale
|
||||
float(kwargs[ATTR_TEMPERATURE]) - self._target_temp_offset
|
||||
) / self._target_temp_scale
|
||||
if self._data_type in (
|
||||
DataType.INT16,
|
||||
DataType.INT32,
|
||||
@@ -472,15 +482,25 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._target_temperature_register[
|
||||
HVACMODE_TO_TARG_TEMP_REG_INDEX_ARRAY[self._attr_hvac_mode]
|
||||
],
|
||||
self._target_temp_scale,
|
||||
self._target_temp_offset,
|
||||
)
|
||||
|
||||
self._attr_current_temperature = await self._async_read_register(
|
||||
self._input_type, self._address
|
||||
self._input_type,
|
||||
self._address,
|
||||
self._current_temp_scale,
|
||||
self._current_temp_offset,
|
||||
)
|
||||
|
||||
# Read the HVAC mode register if defined
|
||||
if self._hvac_mode_register is not None:
|
||||
hvac_mode = await self._async_read_register(
|
||||
CALL_TYPE_REGISTER_HOLDING, self._hvac_mode_register, raw=True
|
||||
CALL_TYPE_REGISTER_HOLDING,
|
||||
self._hvac_mode_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
# Translate the value received
|
||||
@@ -499,7 +519,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
# Read the HVAC action register if defined
|
||||
if self._hvac_action_register is not None:
|
||||
hvac_action = await self._async_read_register(
|
||||
self._hvac_action_type, self._hvac_action_register, raw=True
|
||||
self._hvac_action_type,
|
||||
self._hvac_action_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
# Translate the value received
|
||||
@@ -517,6 +541,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._fan_mode_register
|
||||
if isinstance(self._fan_mode_register, int)
|
||||
else self._fan_mode_register[0],
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
@@ -533,6 +559,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._swing_mode_register
|
||||
if isinstance(self._swing_mode_register, int)
|
||||
else self._swing_mode_register[0],
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
|
||||
@@ -551,7 +579,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
# in the mode register.
|
||||
if self._hvac_onoff_register is not None:
|
||||
onoff = await self._async_read_register(
|
||||
CALL_TYPE_REGISTER_HOLDING, self._hvac_onoff_register, raw=True
|
||||
CALL_TYPE_REGISTER_HOLDING,
|
||||
self._hvac_onoff_register,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_OFFSET,
|
||||
raw=True,
|
||||
)
|
||||
if onoff == self._hvac_off_value:
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
@@ -562,7 +594,12 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
self._attr_hvac_mode = HVACMode.OFF
|
||||
|
||||
async def _async_read_register(
|
||||
self, register_type: str, register: int, raw: bool | None = False
|
||||
self,
|
||||
register_type: str,
|
||||
register: int,
|
||||
scale: float,
|
||||
offset: float,
|
||||
raw: bool | None = False,
|
||||
) -> float | None:
|
||||
"""Read register using the Modbus hub slave."""
|
||||
result = await self._hub.async_pb_call(
|
||||
@@ -579,7 +616,7 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
|
||||
return int(result.registers[0])
|
||||
|
||||
# The regular handling of the value
|
||||
self._value = self.unpack_structure_result(result.registers)
|
||||
self._value = self.unpack_structure_result(result.registers, scale, offset)
|
||||
if not self._value:
|
||||
self._attr_available = False
|
||||
return None
|
||||
|
||||
@@ -19,6 +19,8 @@ CONF_BYTESIZE = "bytesize"
|
||||
CONF_CLIMATES = "climates"
|
||||
CONF_BRIGHTNESS_REGISTER = "brightness_address"
|
||||
CONF_COLOR_TEMP_REGISTER = "color_temp_address"
|
||||
CONF_CURRENT_TEMP_OFFSET = "current_temp_offset"
|
||||
CONF_CURRENT_TEMP_SCALE = "current_temp_scale"
|
||||
CONF_DATA_TYPE = "data_type"
|
||||
CONF_DEVICE_ADDRESS = "device_address"
|
||||
CONF_FANS = "fans"
|
||||
@@ -48,6 +50,8 @@ CONF_SWAP_BYTE = "byte"
|
||||
CONF_SWAP_WORD = "word"
|
||||
CONF_SWAP_WORD_BYTE = "word_byte"
|
||||
CONF_TARGET_TEMP = "target_temp_register"
|
||||
CONF_TARGET_TEMP_OFFSET = "target_temp_offset"
|
||||
CONF_TARGET_TEMP_SCALE = "target_temp_scale"
|
||||
CONF_TARGET_TEMP_WRITE_REGISTERS = "target_temp_write_registers"
|
||||
CONF_FAN_MODE_REGISTER = "fan_mode_register"
|
||||
CONF_FAN_MODE_ON = "state_fan_on"
|
||||
@@ -181,4 +185,7 @@ LIGHT_MODBUS_SCALE_MIN = 0
|
||||
LIGHT_MODBUS_SCALE_MAX = 100
|
||||
LIGHT_MODBUS_INVALID_VALUE = 0xFFFF
|
||||
|
||||
DEFAULT_SCALE = 1.0
|
||||
DEFAULT_OFFSET = 0
|
||||
|
||||
_LOGGER = logging.getLogger(__package__)
|
||||
|
||||
@@ -17,7 +17,6 @@ from homeassistant.const import (
|
||||
CONF_DELAY,
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_SLAVE,
|
||||
CONF_STRUCTURE,
|
||||
@@ -50,7 +49,6 @@ from .const import (
|
||||
CONF_MIN_VALUE,
|
||||
CONF_NAN_VALUE,
|
||||
CONF_PRECISION,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_STATE_OFF,
|
||||
CONF_STATE_ON,
|
||||
@@ -62,6 +60,8 @@ from .const import (
|
||||
CONF_VIRTUAL_COUNT,
|
||||
CONF_WRITE_TYPE,
|
||||
CONF_ZERO_SUPPRESS,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
SIGNAL_STOP_ENTITY,
|
||||
DataType,
|
||||
)
|
||||
@@ -163,8 +163,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
self._swap = config[CONF_SWAP]
|
||||
self._data_type = config[CONF_DATA_TYPE]
|
||||
self._structure: str = config[CONF_STRUCTURE]
|
||||
self._scale = config[CONF_SCALE]
|
||||
self._offset = config[CONF_OFFSET]
|
||||
self._slave_count = config.get(CONF_SLAVE_COUNT) or config.get(
|
||||
CONF_VIRTUAL_COUNT, 0
|
||||
)
|
||||
@@ -181,8 +179,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
self._precision = config.get(CONF_PRECISION, 2)
|
||||
else:
|
||||
self._precision = config.get(CONF_PRECISION, 0)
|
||||
if self._precision > 0 or self._scale != int(self._scale):
|
||||
self._value_is_int = False
|
||||
|
||||
def _swap_registers(self, registers: list[int], slave_count: int) -> list[int]:
|
||||
"""Do swap as needed."""
|
||||
@@ -206,7 +202,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
registers.reverse()
|
||||
return registers
|
||||
|
||||
def __process_raw_value(self, entry: float | str | bytes) -> str | None:
|
||||
def __process_raw_value(
|
||||
self,
|
||||
entry: float | bytes,
|
||||
scale: float = DEFAULT_SCALE,
|
||||
offset: float = DEFAULT_OFFSET,
|
||||
) -> str | None:
|
||||
"""Process value from sensor with NaN handling, scaling, offset, min/max etc."""
|
||||
if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
|
||||
return None
|
||||
@@ -215,7 +216,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
if entry != entry: # noqa: PLR0124
|
||||
# NaN float detection replace with None
|
||||
return None
|
||||
val: float | int = self._scale * entry + self._offset
|
||||
val: float | int = scale * entry + offset
|
||||
if self._min_value is not None and val < self._min_value:
|
||||
val = self._min_value
|
||||
if self._max_value is not None and val > self._max_value:
|
||||
@@ -226,7 +227,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
return str(round(val))
|
||||
return f"{float(val):.{self._precision}f}"
|
||||
|
||||
def unpack_structure_result(self, registers: list[int]) -> str | None:
|
||||
def unpack_structure_result(
|
||||
self,
|
||||
registers: list[int],
|
||||
scale: float = DEFAULT_SCALE,
|
||||
offset: float = DEFAULT_OFFSET,
|
||||
) -> str | None:
|
||||
"""Convert registers to proper result."""
|
||||
|
||||
if self._swap:
|
||||
@@ -250,7 +256,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
# Apply scale, precision, limits to floats and ints
|
||||
v_result = []
|
||||
for entry in val:
|
||||
v_temp = self.__process_raw_value(entry)
|
||||
v_temp = self.__process_raw_value(entry, scale, offset)
|
||||
if self._data_type != DataType.CUSTOM:
|
||||
v_result.append(str(v_temp))
|
||||
else:
|
||||
@@ -258,7 +264,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
return ",".join(map(str, v_result))
|
||||
|
||||
# Apply scale, precision, limits to floats and ints
|
||||
return self.__process_raw_value(val[0])
|
||||
return self.__process_raw_value(val[0], scale, offset)
|
||||
|
||||
|
||||
class ModbusToggleEntity(ModbusBaseEntity, ToggleEntity, RestoreEntity):
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE_CLASS,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_SENSORS,
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
@@ -25,7 +26,14 @@ from homeassistant.helpers.update_coordinator import (
|
||||
)
|
||||
|
||||
from . import get_hub
|
||||
from .const import _LOGGER, CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT
|
||||
from .const import (
|
||||
_LOGGER,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
)
|
||||
from .entity import ModbusStructEntity
|
||||
from .modbus import ModbusHub
|
||||
|
||||
@@ -73,9 +81,13 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
|
||||
self._coordinator: DataUpdateCoordinator[list[float | None] | None] | None = (
|
||||
None
|
||||
)
|
||||
self._scale = entry.get(CONF_SCALE, DEFAULT_SCALE)
|
||||
self._offset = entry.get(CONF_OFFSET, DEFAULT_OFFSET)
|
||||
self._attr_native_unit_of_measurement = entry.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
self._attr_state_class = entry.get(CONF_STATE_CLASS)
|
||||
self._attr_device_class = entry.get(CONF_DEVICE_CLASS)
|
||||
if self._precision > 0 or self._scale != int(self._scale):
|
||||
self._value_is_int = False
|
||||
|
||||
async def async_setup_slaves(
|
||||
self, hass: HomeAssistant, slave_count: int, entry: dict[str, Any]
|
||||
@@ -117,7 +129,9 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
|
||||
self.async_write_ha_state()
|
||||
return
|
||||
self._attr_available = True
|
||||
result = self.unpack_structure_result(raw_result.registers)
|
||||
result = self.unpack_structure_result(
|
||||
raw_result.registers, self._scale, self._offset
|
||||
)
|
||||
if self._coordinator:
|
||||
result_array: list[float | None] = []
|
||||
if result:
|
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.const import (
|
||||
CONF_COUNT,
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_OFFSET,
|
||||
CONF_PORT,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_STRUCTURE,
|
||||
@@ -25,16 +26,23 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import (
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_SCALE,
|
||||
CONF_DATA_TYPE,
|
||||
CONF_FAN_MODE_VALUES,
|
||||
CONF_SCALE,
|
||||
CONF_SLAVE_COUNT,
|
||||
CONF_SWAP,
|
||||
CONF_SWAP_BYTE,
|
||||
CONF_SWAP_WORD,
|
||||
CONF_SWAP_WORD_BYTE,
|
||||
CONF_SWING_MODE_VALUES,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_TARGET_TEMP_SCALE,
|
||||
CONF_VIRTUAL_COUNT,
|
||||
DEFAULT_HUB,
|
||||
DEFAULT_OFFSET,
|
||||
DEFAULT_SCALE,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DOMAIN,
|
||||
PLATFORMS,
|
||||
@@ -243,6 +251,46 @@ def duplicate_fan_mode_validator(config: dict[str, Any]) -> dict:
|
||||
return config
|
||||
|
||||
|
||||
def not_zero_value(val: float, errMsg: str) -> float:
|
||||
"""Check value is not zero."""
|
||||
if val == 0:
|
||||
raise vol.Invalid(errMsg)
|
||||
return val
|
||||
|
||||
|
||||
def ensure_and_check_conflicting_scales_and_offsets(config: dict[str, Any]) -> dict:
|
||||
"""Check for conflicts in scale/offset and ensure target/current temp scale/offset is set."""
|
||||
config_keys = [
|
||||
(CONF_SCALE, CONF_TARGET_TEMP_SCALE, CONF_CURRENT_TEMP_SCALE, DEFAULT_SCALE),
|
||||
(
|
||||
CONF_OFFSET,
|
||||
CONF_TARGET_TEMP_OFFSET,
|
||||
CONF_CURRENT_TEMP_OFFSET,
|
||||
DEFAULT_OFFSET,
|
||||
),
|
||||
]
|
||||
|
||||
for generic_key, target_key, current_key, default_value in config_keys:
|
||||
if generic_key in config and (target_key in config or current_key in config):
|
||||
raise vol.Invalid(
|
||||
f"Cannot use both '{generic_key}' and temperature-specific parameters "
|
||||
f"('{target_key}' or '{current_key}') in the same configuration. "
|
||||
f"Either the '{generic_key}' parameter (which applies to both temperatures) "
|
||||
"or the new temperature-specific parameters, but not both."
|
||||
)
|
||||
if generic_key in config:
|
||||
value = config.pop(generic_key)
|
||||
config[target_key] = value
|
||||
config[current_key] = value
|
||||
|
||||
if target_key not in config:
|
||||
config[target_key] = default_value
|
||||
if current_key not in config:
|
||||
config[current_key] = default_value
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def duplicate_swing_mode_validator(config: dict[str, Any]) -> dict:
|
||||
"""Control modbus climate swing mode values for duplicates."""
|
||||
swing_modes: set[int] = set()
|
||||
|
||||
@@ -238,12 +238,14 @@ async def _client_listen(
|
||||
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: MusicAssistantConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
if unload_ok:
|
||||
mass_entry_data: MusicAssistantEntryData = entry.runtime_data
|
||||
mass_entry_data = entry.runtime_data
|
||||
mass_entry_data.listen_task.cancel()
|
||||
await mass_entry_data.mass.disconnect()
|
||||
|
||||
@@ -251,7 +253,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
|
||||
async def async_remove_config_entry_device(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
|
||||
hass: HomeAssistant,
|
||||
config_entry: MusicAssistantConfigEntry,
|
||||
device_entry: dr.DeviceEntry,
|
||||
) -> bool:
|
||||
"""Remove a config entry from a device."""
|
||||
player_id = next(
|
||||
|
||||
@@ -115,6 +115,13 @@ QUEUE_OPTION_MAP = {
|
||||
MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE,
|
||||
}
|
||||
|
||||
REPEAT_MODE_MAPPING_TO_HA = {
|
||||
MassRepeatMode.OFF: RepeatMode.OFF,
|
||||
MassRepeatMode.ONE: RepeatMode.ONE,
|
||||
MassRepeatMode.ALL: RepeatMode.ALL,
|
||||
# UNKNOWN is intentionally not mapped - will return None
|
||||
}
|
||||
|
||||
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
|
||||
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
|
||||
SERVICE_TRANSFER_QUEUE = "transfer_queue"
|
||||
@@ -657,7 +664,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
|
||||
# player has an MA queue active (either its own queue or some group queue)
|
||||
self._attr_app_id = DOMAIN
|
||||
self._attr_shuffle = queue.shuffle_enabled
|
||||
self._attr_repeat = queue.repeat_mode.value
|
||||
self._attr_repeat = REPEAT_MODE_MAPPING_TO_HA.get(queue.repeat_mode)
|
||||
if not (cur_item := queue.current_item):
|
||||
# queue is empty
|
||||
return
|
||||
|
||||
@@ -19,7 +19,11 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
|
||||
from .coordinator import NASwebCoordinator
|
||||
from .nasweb_data import NASwebData
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.ALARM_CONTROL_PANEL,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
NASWEB_CONFIG_URL = "https://{host}/page"
|
||||
|
||||
|
||||
154
homeassistant/components/nasweb/alarm_control_panel.py
Normal file
154
homeassistant/components/nasweb/alarm_control_panel.py
Normal file
@@ -0,0 +1,154 @@
|
||||
"""Platform for NASweb alarms."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from webio_api import Zone as NASwebZone
|
||||
from webio_api.const import STATE_ZONE_ALARM, STATE_ZONE_ARMED, STATE_ZONE_DISARMED
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
DOMAIN as DOMAIN_ALARM_CONTROL_PANEL,
|
||||
AlarmControlPanelEntity,
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
CodeFormat,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
import homeassistant.helpers.entity_registry as er
|
||||
from homeassistant.helpers.typing import DiscoveryInfoType
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
BaseCoordinatorEntity,
|
||||
BaseDataUpdateCoordinatorProtocol,
|
||||
)
|
||||
|
||||
from . import NASwebConfigEntry
|
||||
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
ALARM_CONTROL_PANEL_TRANSLATION_KEY = "zone"
|
||||
|
||||
NASWEB_STATE_TO_HA_STATE = {
|
||||
STATE_ZONE_ALARM: AlarmControlPanelState.TRIGGERED,
|
||||
STATE_ZONE_ARMED: AlarmControlPanelState.ARMED_AWAY,
|
||||
STATE_ZONE_DISARMED: AlarmControlPanelState.DISARMED,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    config: NASwebConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up alarm control panel platform.

    Registers a coordinator listener that keeps the set of ZoneEntity
    instances in sync with the zones currently reported by the device:
    newly reported zones are added, vanished zones are removed from the
    entity registry.
    """
    coordinator = config.runtime_data
    known_indices: set[int] = set()

    @callback
    def _sync_zone_entities() -> None:
        reported: dict[int, NASwebZone] = {
            zone.index: zone for zone in coordinator.webio_api.zones
        }

        # Create entities for zones we have not seen before.
        fresh_entities: list[ZoneEntity] = []
        for idx in set(reported) - known_indices:
            zone = reported[idx]
            if not isinstance(zone, NASwebZone):
                _LOGGER.error("Cannot create ZoneEntity without NASwebZone")
                continue
            fresh_entities.append(ZoneEntity(coordinator, zone))
            known_indices.add(idx)
        async_add_entities(fresh_entities)

        # Remove registry entries for zones the device no longer reports.
        registry = er.async_get(hass)
        for idx in known_indices - set(reported):
            unique_id = f"{DOMAIN}.{config.unique_id}.zone.{idx}"
            entity_id = registry.async_get_entity_id(
                DOMAIN_ALARM_CONTROL_PANEL, DOMAIN, unique_id
            )
            if entity_id:
                registry.async_remove(entity_id)
                known_indices.remove(idx)
            else:
                _LOGGER.warning("Failed to remove old zone: no entity_id")

    coordinator.async_add_listener(_sync_zone_entities)
    _sync_zone_entities()
|
||||
|
||||
|
||||
class ZoneEntity(AlarmControlPanelEntity, BaseCoordinatorEntity):
    """Entity representing a single NASweb alarm zone."""

    _attr_has_entity_name = True
    _attr_should_poll = False
    _attr_translation_key = ALARM_CONTROL_PANEL_TRANSLATION_KEY

    def __init__(
        self, coordinator: BaseDataUpdateCoordinatorProtocol, nasweb_zone: NASwebZone
    ) -> None:
        """Initialize zone entity."""
        super().__init__(coordinator)
        self._zone = nasweb_zone
        self._attr_name = nasweb_zone.name
        self._attr_translation_placeholders = {"index": f"{nasweb_zone.index:2d}"}
        self._attr_unique_id = (
            f"{DOMAIN}.{self._zone.webio_serial}.zone.{self._zone.index}"
        )
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._zone.webio_serial)},
        )

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        # Populate state immediately instead of waiting for the next push.
        self._handle_coordinator_update()

    def _set_attr_available(
        self, entity_last_update: float, available: bool | None
    ) -> None:
        # A zone is unavailable when the coordinator never delivered data
        # or when its status update has gone stale.
        stale = (
            self.coordinator.last_update is None
            or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
        )
        if stale:
            self._attr_available = False
        else:
            self._attr_available = available if available is not None else False

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._attr_alarm_state = NASWEB_STATE_TO_HA_STATE[self._zone.state]
        # pass_type selects the code entry style: 0 -> text, 1 -> numeric,
        # anything else -> no code required.
        self._attr_code_format = {0: CodeFormat.TEXT, 1: CodeFormat.NUMBER}.get(
            self._zone.pass_type
        )
        self._attr_code_arm_required = self._attr_code_format is not None

        self._set_attr_available(self._zone.last_update, self._zone.available)
        self.async_write_ha_state()

    async def async_update(self) -> None:
        """Update the entity.

        Only used by the generic entity update service.
        Scheduling updates is not necessary, the coordinator takes care of updates via push notifications.
        """

    @property
    def supported_features(self) -> AlarmControlPanelEntityFeature:
        """Return the list of supported features."""
        return AlarmControlPanelEntityFeature.ARM_AWAY

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Arm away ZoneEntity."""
        await self._zone.arm(code)

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Disarm ZoneEntity."""
        await self._zone.disarm(code)
|
||||
@@ -23,6 +23,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
KEY_INPUTS = "inputs"
|
||||
KEY_OUTPUTS = "outputs"
|
||||
KEY_ZONES = "zones"
|
||||
|
||||
|
||||
class NotificationCoordinator:
|
||||
@@ -103,6 +104,7 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
KEY_ZONES: self.webio_api.zones,
|
||||
}
|
||||
self.async_set_updated_data(data)
|
||||
|
||||
@@ -197,5 +199,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
|
||||
KEY_OUTPUTS: self.webio_api.outputs,
|
||||
KEY_INPUTS: self.webio_api.inputs,
|
||||
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
|
||||
KEY_ZONES: self.webio_api.zones,
|
||||
}
|
||||
self.async_set_updated_data(new_data)
|
||||
|
||||
@@ -24,6 +24,11 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"alarm_control_panel": {
|
||||
"zone": {
|
||||
"name": "Zone {index}"
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"sensor_input": {
|
||||
"name": "Input {index}",
|
||||
|
||||
76
homeassistant/components/neato/__init__.py
Normal file
76
homeassistant/components/neato/__init__.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Support for Neato botvac connected vacuum cleaners."""
|
||||
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from pybotvac import Account
|
||||
from pybotvac.exceptions import NeatoException
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from . import api
|
||||
from .const import NEATO_DOMAIN, NEATO_LOGIN
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
Platform.CAMERA,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VACUUM,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up config entry.

    Validates the OAuth token, builds the Neato hub, performs an initial
    robot refresh, and forwards the entry to all supported platforms.
    """
    hass.data.setdefault(NEATO_DOMAIN, {})
    if CONF_TOKEN not in entry.data:
        # Entry predates OAuth storage; force the user through reauth.
        raise ConfigEntryAuthFailed

    implementation = (
        await config_entry_oauth2_flow.async_get_config_entry_implementation(
            hass, entry
        )
    )

    session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
    try:
        await session.async_ensure_token_valid()
    except aiohttp.ClientResponseError as ex:
        _LOGGER.debug("API error: %s (%s)", ex.code, ex.message)
        # 401/403 mean the token itself is bad -> reauth; anything else is
        # transient, so retry setup later.
        if ex.code in (401, 403):
            raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from ex
        raise ConfigEntryNotReady from ex

    auth = api.ConfigEntryAuth(hass, entry, implementation)
    hass.data[NEATO_DOMAIN][entry.entry_id] = auth
    hub = NeatoHub(hass, Account(auth))

    await hub.async_update_entry_unique_id(entry)

    try:
        await hass.async_add_executor_job(hub.update_robots)
    except NeatoException as ex:
        _LOGGER.debug("Failed to connect to Neato API")
        raise ConfigEntryNotReady from ex

    hass.data[NEATO_LOGIN] = hub

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload config entry and drop its stored auth session."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[NEATO_DOMAIN].pop(entry.entry_id)
    return unload_ok
|
||||
58
homeassistant/components/neato/api.py
Normal file
58
homeassistant/components/neato/api.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""API for Neato Botvac bound to Home Assistant OAuth."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import run_coroutine_threadsafe
|
||||
from typing import Any
|
||||
|
||||
import pybotvac
|
||||
|
||||
from homeassistant import config_entries, core
|
||||
from homeassistant.components.application_credentials import AuthImplementation
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
|
||||
class ConfigEntryAuth(pybotvac.OAuthSession):  # type: ignore[misc]
    """Provide Neato Botvac authentication tied to an OAuth2 based config entry."""

    def __init__(
        self,
        hass: core.HomeAssistant,
        config_entry: config_entries.ConfigEntry,
        implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
    ) -> None:
        """Initialize Neato Botvac Auth."""
        self.hass = hass
        self.session = config_entry_oauth2_flow.OAuth2Session(
            hass, config_entry, implementation
        )
        super().__init__(self.session.token, vendor=pybotvac.Neato())

    def refresh_tokens(self) -> str:
        """Refresh and return new Neato Botvac tokens.

        Called from pybotvac's worker thread, so the async token refresh is
        scheduled onto the HA event loop and waited on synchronously.
        """
        future = run_coroutine_threadsafe(
            self.session.async_ensure_token_valid(), self.hass.loop
        )
        future.result()
        return self.session.token["access_token"]  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
class NeatoImplementation(AuthImplementation):
    """Neato implementation of LocalOAuth2Implementation.

    We need this class because we have to add client_secret
    and scope to the authorization request.
    """

    @property
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {"client_secret": self.client_secret}

    async def async_generate_authorize_url(self, flow_id: str) -> str:
        """Generate a url for the user to authorize.

        We must make sure that the plus signs are not encoded.
        """
        # Append the scope manually so the '+' separators survive unencoded.
        base_url = await super().async_generate_authorize_url(flow_id)
        return f"{base_url}&scope=public_profile+control_robots+maps"
|
||||
28
homeassistant/components/neato/application_credentials.py
Normal file
28
homeassistant/components/neato/application_credentials.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Application credentials platform for neato."""
|
||||
|
||||
from pybotvac import Neato
|
||||
|
||||
from homeassistant.components.application_credentials import (
|
||||
AuthorizationServer,
|
||||
ClientCredential,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from . import api
|
||||
|
||||
|
||||
async def async_get_auth_implementation(
    hass: HomeAssistant, auth_domain: str, credential: ClientCredential
) -> config_entry_oauth2_flow.AbstractOAuth2Implementation:
    """Return auth implementation for a custom auth implementation."""
    # Pull the OAuth endpoints from the pybotvac vendor definition.
    neato_vendor = Neato()
    auth_server = AuthorizationServer(
        authorize_url=neato_vendor.auth_endpoint,
        token_url=neato_vendor.token_endpoint,
    )
    return api.NeatoImplementation(hass, auth_domain, credential, auth_server)
|
||||
44
homeassistant/components/neato/button.py
Normal file
44
homeassistant/components/neato/button.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Support for Neato buttons."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pybotvac import Robot
|
||||
|
||||
from homeassistant.components.button import ButtonEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_ROBOTS
|
||||
from .entity import NeatoEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Neato button from config entry."""
    # One dismiss-alert button per discovered robot; True -> update on add.
    async_add_entities(
        [NeatoDismissAlertButton(robot) for robot in hass.data[NEATO_ROBOTS]],
        True,
    )
|
||||
|
||||
|
||||
class NeatoDismissAlertButton(NeatoEntity, ButtonEntity):
    """Representation of a dismiss_alert button entity."""

    _attr_translation_key = "dismiss_alert"
    _attr_entity_category = EntityCategory.CONFIG

    def __init__(self, robot: Robot) -> None:
        """Initialize a dismiss_alert Neato button entity."""
        super().__init__(robot)
        self._attr_unique_id = f"{robot.serial}_dismiss_alert"

    async def async_press(self) -> None:
        """Press the button."""
        # pybotvac call is blocking -> run it in the executor.
        await self.hass.async_add_executor_job(self.robot.dismiss_current_alert)
|
||||
130
homeassistant/components/neato/camera.py
Normal file
130
homeassistant/components/neato/camera.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Support for loading picture from Neato."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pybotvac.exceptions import NeatoRobotException
|
||||
from pybotvac.robot import Robot
|
||||
from urllib3.response import HTTPResponse
|
||||
|
||||
from homeassistant.components.camera import Camera
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import NEATO_LOGIN, NEATO_MAP_DATA, NEATO_ROBOTS, SCAN_INTERVAL_MINUTES
|
||||
from .entity import NeatoEntity
|
||||
from .hub import NeatoHub
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
|
||||
ATTR_GENERATED_AT = "generated_at"
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Neato camera with config entry."""
    neato: NeatoHub = hass.data[NEATO_LOGIN]
    mapdata: dict[str, Any] | None = hass.data.get(NEATO_MAP_DATA)

    # Only robots advertising the "maps" trait can provide a cleaning map.
    entities = [
        NeatoCleaningMap(neato, robot, mapdata)
        for robot in hass.data[NEATO_ROBOTS]
        if "maps" in robot.traits
    ]
    if not entities:
        return

    _LOGGER.debug("Adding robots for cleaning maps %s", entities)
    async_add_entities(entities, True)
|
||||
|
||||
|
||||
class NeatoCleaningMap(NeatoEntity, Camera):
    """Neato cleaning map for last clean."""

    _attr_translation_key = "cleaning_map"

    def __init__(
        self, neato: NeatoHub, robot: Robot, mapdata: dict[str, Any] | None
    ) -> None:
        """Initialize Neato cleaning map."""
        super().__init__(robot)
        Camera.__init__(self)
        self.neato = neato
        self._mapdata = mapdata
        self._available = neato is not None
        self._robot_serial: str = self.robot.serial
        self._attr_unique_id = self.robot.serial
        self._generated_at: str | None = None
        self._image_url: str | None = None
        self._image: bytes | None = None

    def camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return image response."""
        self.update()
        return self._image

    def _mark_connection_error(self, ex: NeatoRobotException) -> None:
        """Record a connection failure and clear the cached image."""
        # Log only on the available -> unavailable transition to avoid spam.
        if self._available:
            _LOGGER.error(
                "Neato camera connection error for '%s': %s", self.entity_id, ex
            )
        self._image = None
        self._image_url = None
        self._available = False

    def update(self) -> None:
        """Check the contents of the map list.

        Refreshes robot data, then downloads the latest map image if its
        URL changed since the last update.
        """
        _LOGGER.debug("Running camera update for '%s'", self.entity_id)
        try:
            self.neato.update_robots()
        except NeatoRobotException as ex:
            self._mark_connection_error(ex)
            return

        # Guard: without cached map data there is nothing to download.
        # (Fixes a potential unbound-local on map_data when no map data
        # has been fetched yet.)
        if not self._mapdata:
            return

        map_data: dict[str, Any] = self._mapdata[self._robot_serial]["maps"][0]
        if (image_url := map_data["url"]) == self._image_url:
            _LOGGER.debug(
                "The map image_url for '%s' is the same as old", self.entity_id
            )
            return

        try:
            image: HTTPResponse = self.neato.download_map(image_url)
        except NeatoRobotException as ex:
            self._mark_connection_error(ex)
            return

        self._image = image.read()
        self._image_url = image_url
        self._generated_at = map_data.get("generated_at")
        self._available = True

    @property
    def available(self) -> bool:
        """Return if the robot is available."""
        return self._available

    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return the state attributes of the vacuum cleaner."""
        data: dict[str, Any] = {}

        if self._generated_at is not None:
            data[ATTR_GENERATED_AT] = self._generated_at

        return data
|
||||
64
homeassistant/components/neato/config_flow.py
Normal file
64
homeassistant/components/neato/config_flow.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Config flow for Neato Botvac."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
from .const import NEATO_DOMAIN
|
||||
|
||||
|
||||
class OAuth2FlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=NEATO_DOMAIN
):
    """Config flow to handle Neato Botvac OAuth2 authentication."""

    DOMAIN = NEATO_DOMAIN

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Create an entry for the flow."""
        # Only a single Neato account is supported outside of reauth.
        if self.source != SOURCE_REAUTH and self._async_current_entries():
            return self.async_abort(reason="already_configured")
        return await super().async_step_user(user_input=user_input)

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon migration of old entries."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauth upon migration of old entries."""
        if user_input is None:
            return self.async_show_form(step_id="reauth_confirm")
        return await self.async_step_user()

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Create an entry for the flow. Update an entry if one already exist."""
        existing = self._async_current_entries()
        if self.source == SOURCE_REAUTH and existing:
            # Reauth: refresh the stored token data and reload the entry.
            entry = existing[0]
            self.hass.config_entries.async_update_entry(
                entry, title=self.flow_impl.name, data=data
            )
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(entry.entry_id)
            )
            return self.async_abort(reason="reauth_successful")
        return self.async_create_entry(title=self.flow_impl.name, data=data)
|
||||
150
homeassistant/components/neato/const.py
Normal file
150
homeassistant/components/neato/const.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Constants for Neato integration."""
|
||||
|
||||
NEATO_DOMAIN = "neato"
|
||||
|
||||
CONF_VENDOR = "vendor"
|
||||
NEATO_LOGIN = "neato_login"
|
||||
NEATO_MAP_DATA = "neato_map_data"
|
||||
NEATO_PERSISTENT_MAPS = "neato_persistent_maps"
|
||||
NEATO_ROBOTS = "neato_robots"
|
||||
|
||||
SCAN_INTERVAL_MINUTES = 1
|
||||
|
||||
MODE = {1: "Eco", 2: "Turbo"}
|
||||
|
||||
ACTION = {
|
||||
0: "Invalid",
|
||||
1: "House Cleaning",
|
||||
2: "Spot Cleaning",
|
||||
3: "Manual Cleaning",
|
||||
4: "Docking",
|
||||
5: "User Menu Active",
|
||||
6: "Suspended Cleaning",
|
||||
7: "Updating",
|
||||
8: "Copying logs",
|
||||
9: "Recovering Location",
|
||||
10: "IEC test",
|
||||
11: "Map cleaning",
|
||||
12: "Exploring map (creating a persistent map)",
|
||||
13: "Acquiring Persistent Map IDs",
|
||||
14: "Creating & Uploading Map",
|
||||
15: "Suspended Exploration",
|
||||
}
|
||||
|
||||
ERRORS = {
|
||||
"ui_error_battery_battundervoltlithiumsafety": "Replace battery",
|
||||
"ui_error_battery_critical": "Replace battery",
|
||||
"ui_error_battery_invalidsensor": "Replace battery",
|
||||
"ui_error_battery_lithiumadapterfailure": "Replace battery",
|
||||
"ui_error_battery_mismatch": "Replace battery",
|
||||
"ui_error_battery_nothermistor": "Replace battery",
|
||||
"ui_error_battery_overtemp": "Replace battery",
|
||||
"ui_error_battery_overvolt": "Replace battery",
|
||||
"ui_error_battery_undercurrent": "Replace battery",
|
||||
"ui_error_battery_undertemp": "Replace battery",
|
||||
"ui_error_battery_undervolt": "Replace battery",
|
||||
"ui_error_battery_unplugged": "Replace battery",
|
||||
"ui_error_brush_stuck": "Brush stuck",
|
||||
"ui_error_brush_overloaded": "Brush overloaded",
|
||||
"ui_error_bumper_stuck": "Bumper stuck",
|
||||
"ui_error_check_battery_switch": "Check battery",
|
||||
"ui_error_corrupt_scb": "Call customer service corrupt board",
|
||||
"ui_error_deck_debris": "Deck debris",
|
||||
"ui_error_dflt_app": "Check Neato app",
|
||||
"ui_error_disconnect_chrg_cable": "Disconnected charge cable",
|
||||
"ui_error_disconnect_usb_cable": "Disconnected USB cable",
|
||||
"ui_error_dust_bin_missing": "Dust bin missing",
|
||||
"ui_error_dust_bin_full": "Dust bin full",
|
||||
"ui_error_dust_bin_emptied": "Dust bin emptied",
|
||||
"ui_error_hardware_failure": "Hardware failure",
|
||||
"ui_error_ldrop_stuck": "Clear my path",
|
||||
"ui_error_lds_jammed": "Clear my path",
|
||||
"ui_error_lds_bad_packets": "Check Neato app",
|
||||
"ui_error_lds_disconnected": "Check Neato app",
|
||||
"ui_error_lds_missed_packets": "Check Neato app",
|
||||
"ui_error_lwheel_stuck": "Clear my path",
|
||||
"ui_error_navigation_backdrop_frontbump": "Clear my path",
|
||||
"ui_error_navigation_backdrop_leftbump": "Clear my path",
|
||||
"ui_error_navigation_backdrop_wheelextended": "Clear my path",
|
||||
"ui_error_navigation_noprogress": "Clear my path",
|
||||
"ui_error_navigation_origin_unclean": "Clear my path",
|
||||
"ui_error_navigation_pathproblems": "Cannot return to base",
|
||||
"ui_error_navigation_pinkycommsfail": "Clear my path",
|
||||
"ui_error_navigation_falling": "Clear my path",
|
||||
"ui_error_navigation_noexitstogo": "Clear my path",
|
||||
"ui_error_navigation_nomotioncommands": "Clear my path",
|
||||
"ui_error_navigation_rightdrop_leftbump": "Clear my path",
|
||||
"ui_error_navigation_undockingfailed": "Clear my path",
|
||||
"ui_error_picked_up": "Picked up",
|
||||
"ui_error_qa_fail": "Check Neato app",
|
||||
"ui_error_rdrop_stuck": "Clear my path",
|
||||
"ui_error_reconnect_failed": "Reconnect failed",
|
||||
"ui_error_rwheel_stuck": "Clear my path",
|
||||
"ui_error_stuck": "Stuck!",
|
||||
"ui_error_unable_to_return_to_base": "Unable to return to base",
|
||||
"ui_error_unable_to_see": "Clean vacuum sensors",
|
||||
"ui_error_vacuum_slip": "Clear my path",
|
||||
"ui_error_vacuum_stuck": "Clear my path",
|
||||
"ui_error_warning": "Error check app",
|
||||
"batt_base_connect_fail": "Battery failed to connect to base",
|
||||
"batt_base_no_power": "Battery base has no power",
|
||||
"batt_low": "Battery low",
|
||||
"batt_on_base": "Battery on base",
|
||||
"clean_tilt_on_start": "Clean the tilt on start",
|
||||
"dustbin_full": "Dust bin full",
|
||||
"dustbin_missing": "Dust bin missing",
|
||||
"gen_picked_up": "Picked up",
|
||||
"hw_fail": "Hardware failure",
|
||||
"hw_tof_sensor_sensor": "Hardware sensor disconnected",
|
||||
"lds_bad_packets": "Bad packets",
|
||||
"lds_deck_debris": "Debris on deck",
|
||||
"lds_disconnected": "Disconnected",
|
||||
"lds_jammed": "Jammed",
|
||||
"lds_missed_packets": "Missed packets",
|
||||
"maint_brush_stuck": "Brush stuck",
|
||||
"maint_brush_overload": "Brush overloaded",
|
||||
"maint_bumper_stuck": "Bumper stuck",
|
||||
"maint_customer_support_qa": "Contact customer support",
|
||||
"maint_vacuum_stuck": "Vacuum is stuck",
|
||||
"maint_vacuum_slip": "Vacuum is stuck",
|
||||
"maint_left_drop_stuck": "Vacuum is stuck",
|
||||
"maint_left_wheel_stuck": "Vacuum is stuck",
|
||||
"maint_right_drop_stuck": "Vacuum is stuck",
|
||||
"maint_right_wheel_stuck": "Vacuum is stuck",
|
||||
"not_on_charge_base": "Not on the charge base",
|
||||
"nav_robot_falling": "Clear my path",
|
||||
"nav_no_path": "Clear my path",
|
||||
"nav_path_problem": "Clear my path",
|
||||
"nav_backdrop_frontbump": "Clear my path",
|
||||
"nav_backdrop_leftbump": "Clear my path",
|
||||
"nav_backdrop_wheelextended": "Clear my path",
|
||||
"nav_floorplan_zone_path_blocked": "Clear my path",
|
||||
"nav_mag_sensor": "Clear my path",
|
||||
"nav_no_exit": "Clear my path",
|
||||
"nav_no_movement": "Clear my path",
|
||||
"nav_rightdrop_leftbump": "Clear my path",
|
||||
"nav_undocking_failed": "Clear my path",
|
||||
}
|
||||
|
||||
ALERTS = {
|
||||
"ui_alert_dust_bin_full": "Please empty dust bin",
|
||||
"ui_alert_recovering_location": "Returning to start",
|
||||
"ui_alert_battery_chargebasecommerr": "Battery error",
|
||||
"ui_alert_busy_charging": "Busy charging",
|
||||
"ui_alert_charging_base": "Base charging",
|
||||
"ui_alert_charging_power": "Charging power",
|
||||
"ui_alert_connect_chrg_cable": "Connect charge cable",
|
||||
"ui_alert_info_thank_you": "Thank you",
|
||||
"ui_alert_invalid": "Invalid check app",
|
||||
"ui_alert_old_error": "Old error",
|
||||
"ui_alert_swupdate_fail": "Update failed",
|
||||
"dustbin_full": "Please empty dust bin",
|
||||
"maint_brush_change": "Change the brush",
|
||||
"maint_filter_change": "Change the filter",
|
||||
"clean_completed_to_start": "Cleaning completed",
|
||||
"nav_floorplan_not_created": "No floorplan found",
|
||||
"nav_floorplan_load_fail": "Failed to load floorplan",
|
||||
"nav_floorplan_localization_fail": "Failed to load floorplan",
|
||||
"clean_incomplete_to_start": "Cleaning incomplete",
|
||||
"log_upload_failed": "Logs failed to upload",
|
||||
}
|
||||
24
homeassistant/components/neato/entity.py
Normal file
24
homeassistant/components/neato/entity.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""Base entity for Neato."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pybotvac import Robot
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import NEATO_DOMAIN
|
||||
|
||||
|
||||
class NeatoEntity(Entity):
    """Base Neato entity."""

    _attr_has_entity_name = True

    def __init__(self, robot: Robot) -> None:
        """Initialize Neato entity."""
        self.robot = robot
        # All entities of one robot share a single device, keyed by serial.
        self._attr_device_info: DeviceInfo = DeviceInfo(
            identifiers={(NEATO_DOMAIN, robot.serial)},
            name=robot.name,
        )
|
||||
50
homeassistant/components/neato/hub.py
Normal file
50
homeassistant/components/neato/hub.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Support for Neato botvac connected vacuum cleaners."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pybotvac import Account
|
||||
from urllib3.response import HTTPResponse
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import NEATO_MAP_DATA, NEATO_PERSISTENT_MAPS, NEATO_ROBOTS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NeatoHub:
    """A My Neato hub wrapper class."""

    def __init__(self, hass: HomeAssistant, neato: Account) -> None:
        """Initialize the Neato hub."""
        self._hass = hass
        self.my_neato: Account = neato

    @Throttle(timedelta(minutes=1))
    def update_robots(self) -> None:
        """Update the robot states."""
        _LOGGER.debug("Running HUB.update_robots %s", self._hass.data.get(NEATO_ROBOTS))
        self._hass.data[NEATO_ROBOTS] = self.my_neato.robots
        self._hass.data[NEATO_PERSISTENT_MAPS] = self.my_neato.persistent_maps
        self._hass.data[NEATO_MAP_DATA] = self.my_neato.maps

    def download_map(self, url: str) -> HTTPResponse:
        """Download a new map image."""
        return self.my_neato.get_map_image(url)

    async def async_update_entry_unique_id(self, entry: ConfigEntry) -> str:
        """Update entry for unique_id.

        Refreshes user data from the cloud and, if the account's unique id
        changed, writes it back onto the config entry.
        """
        await self._hass.async_add_executor_job(self.my_neato.refresh_userdata)
        unique_id: str = self.my_neato.unique_id

        if entry.unique_id != unique_id:
            _LOGGER.debug("Updating user unique_id for previous config entry")
            self._hass.config_entries.async_update_entry(entry, unique_id=unique_id)
        return unique_id
|
||||
7
homeassistant/components/neato/icons.json
Normal file
7
homeassistant/components/neato/icons.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"services": {
|
||||
"custom_cleaning": {
|
||||
"service": "mdi:broom"
|
||||
}
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user