Mirror of https://github.com/home-assistant/core.git, synced 2025-11-09 02:49:40 +00:00.

Compare commits: mfa_notify ... cursor/add (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b2fe77b7f5 | |
| | d984e4398e | |
| | 75bd1a0310 | |

.github/workflows/builder.yml (vendored, 4 changes)
@@ -88,10 +88,6 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
exclude:
- arch: armv7
- arch: armhf
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
with:
category: "/language:python"

@@ -362,7 +362,6 @@ homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*
homeassistant.components.network.*

CODEOWNERS (generated, 8 changes)

@@ -1539,8 +1539,8 @@ build.json @home-assistant/supervisor
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sunricher_dali/ @niracler
/tests/components/sunricher_dali/ @niracler
/homeassistant/components/sunricher_dali_center/ @niracler
/tests/components/sunricher_dali_center/ @niracler
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen

@@ -1817,8 +1817,8 @@ build.json @home-assistant/supervisor
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
/tests/components/xbox/ @hunterjm @tr4nt0r
/homeassistant/components/xbox/ @hunterjm
/tests/components/xbox/ @hunterjm
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79

@@ -1,10 +1,7 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*

@@ -6,6 +6,7 @@ Sending HOTP through notify service
from __future__ import annotations

import asyncio
from collections import OrderedDict
import logging
from typing import Any, cast

@@ -303,14 +304,13 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
if not self._available_notify_services:
return self.async_abort(reason="no_available_service")

schema = vol.Schema(
{
vol.Required("notify_service"): vol.In(self._available_notify_services),
vol.Optional("target"): str,
}
)
schema: dict[str, Any] = OrderedDict()
schema["notify_service"] = vol.In(self._available_notify_services)
schema["target"] = vol.Optional(str)

return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
return self.async_show_form(
step_id="init", data_schema=vol.Schema(schema), errors=errors
)

async def async_step_setup(
self, user_input: dict[str, str] | None = None
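The hunk above shows two ways of building the notify MFA form schema: an OrderedDict of validators wrapped in vol.Schema at display time, and a plain vol.Schema built up front. A minimal standalone sketch of the vol.Schema form of that pattern (the service names here are made up for illustration):

    import voluptuous as vol

    available_notify_services = ["notify.mobile_app", "notify.smtp"]  # hypothetical services

    schema = vol.Schema(
        {
            vol.Required("notify_service"): vol.In(available_notify_services),
            vol.Optional("target"): str,
        }
    )

    # Valid input passes through unchanged; an unknown service raises vol.Invalid.
    print(schema({"notify_service": "notify.smtp", "target": "alice"}))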
@@ -179,18 +179,12 @@ class Data:
user_hash = base64.b64decode(found["password"])

# bcrypt.checkpw is timing-safe
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
if not bcrypt.checkpw(password.encode()[:72], user_hash):
if not bcrypt.checkpw(password.encode(), user_hash):
raise InvalidAuth

def hash_password(self, password: str, for_storage: bool = False) -> bytes:
"""Encode a password."""
# With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
# Previously the password was silently truncated.
# https://github.com/pyca/bcrypt/pull/1000
hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))

if for_storage:
hashed = base64.b64encode(hashed)
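The comments in the hunk above explain the difference between the two variants: bcrypt 5.0 raises ValueError for passwords longer than 72 bytes instead of silently truncating them, so one side truncates explicitly before hashing and verifying. A minimal standalone sketch of that behaviour (assumes the bcrypt package is installed):

    import bcrypt

    password = ("x" * 100).encode()  # longer than bcrypt's 72-byte limit

    # Explicit truncation keeps the pre-5.0 behaviour instead of raising ValueError.
    hashed = bcrypt.hashpw(password[:72], bcrypt.gensalt(rounds=12))

    # Verification must truncate the same way, otherwise bcrypt 5.0 raises as well.
    assert bcrypt.checkpw(password[:72], hashed)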
@@ -1,5 +1,11 @@
{
"domain": "yale",
"name": "Yale (non-US/Canada)",
"integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
"name": "Yale",
"integrations": [
"august",
"yale_smart_alarm",
"yalexs_ble",
"yale_home",
"yale"
]
}

@@ -1,5 +0,0 @@
{
"domain": "yale_august",
"name": "Yale August (US/Canada)",
"integrations": ["august", "august_ble"]
}

@@ -2,16 +2,14 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import cast

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfEnergy, UnitOfTemperature
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -22,74 +20,44 @@ from .const import CONNECTION_TYPE, DOMAIN, LOCAL
from .coordinator import AdaxCloudCoordinator

@dataclass(kw_only=True, frozen=True)
class AdaxSensorDescription(SensorEntityDescription):
"""Describes Adax sensor entity."""

data_key: str

SENSORS: tuple[AdaxSensorDescription, ...] = (
AdaxSensorDescription(
key="temperature",
data_key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
),
AdaxSensorDescription(
key="energy",
data_key="energyWh",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=3,
),
)

async def async_setup_entry(
hass: HomeAssistant,
entry: AdaxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Adax sensors with config flow."""
"""Set up the Adax energy sensors with config flow."""
if entry.data.get(CONNECTION_TYPE) != LOCAL:
cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

# Create individual energy sensors for each device
async_add_entities(
[
AdaxSensor(cloud_coordinator, entity_description, device_id)
for device_id in cloud_coordinator.data
for entity_description in SENSORS
]
AdaxEnergySensor(cloud_coordinator, device_id)
for device_id in cloud_coordinator.data
)

class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
"""Representation of an Adax sensor."""
class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
"""Representation of an Adax energy sensor."""

entity_description: AdaxSensorDescription
_attr_has_entity_name = True
_attr_translation_key = "energy"
_attr_device_class = SensorDeviceClass.ENERGY
_attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
_attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
_attr_state_class = SensorStateClass.TOTAL_INCREASING
_attr_suggested_display_precision = 3

def __init__(
self,
coordinator: AdaxCloudCoordinator,
entity_description: AdaxSensorDescription,
device_id: str,
) -> None:
"""Initialize the sensor."""
"""Initialize the energy sensor."""
super().__init__(coordinator)
self.entity_description = entity_description
self._device_id = device_id
room = coordinator.data[device_id]

self._attr_unique_id = (
f"{room['homeId']}_{device_id}_{self.entity_description.key}"
)
self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_id)},
name=room["name"],

@@ -100,14 +68,10 @@ class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and self.entity_description.data_key
in self.coordinator.data[self._device_id]
super().available and "energyWh" in self.coordinator.data[self._device_id]
)

@property
def native_value(self) -> int | float | None:
def native_value(self) -> int:
"""Return the native value of the sensor."""
return self.coordinator.data[self._device_id].get(
self.entity_description.data_key
)
return int(self.coordinator.data[self._device_id]["energyWh"])
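The Adax hunks above contrast a single hard-coded energy sensor class with a description-driven variant, where each AdaxSensorDescription carries a data_key used to pull the value out of the coordinator payload. A simplified, framework-free sketch of that idea (the names and payload are illustrative, not the integration's API):

    from dataclasses import dataclass

    @dataclass(kw_only=True, frozen=True)
    class SensorDescription:
        key: str
        data_key: str
        unit: str

    DESCRIPTIONS = (
        SensorDescription(key="temperature", data_key="temperature", unit="°C"),
        SensorDescription(key="energy", data_key="energyWh", unit="Wh"),
    )

    device_data = {"temperature": 21.5, "energyWh": 1234}  # hypothetical coordinator payload

    # One generic sensor per description instead of one class per measurement.
    for description in DESCRIPTIONS:
        print(description.key, device_data.get(description.data_key), description.unit)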
@@ -30,7 +30,6 @@ generate_data:
media:
accept:
- "*"
multiple: true
generate_image:
fields:
task_name:

@@ -58,4 +57,3 @@ generate_image:
media:
accept:
- "*"
multiple: true

@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS

from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
from .const import DOMAIN, MFCT_ID

_LOGGER = logging.getLogger(__name__)

@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
assert self._discovered_device is not None

if user_input is not None:
if self._discovered_device.device.firmware.need_firmware_upgrade:
if (
self._discovered_device is not None
and self._discovered_device.device.firmware.need_firmware_upgrade
):
return self.async_abort(reason="firmware_upgrade_required")

return self.async_create_entry(
title=self.context["title_placeholders"]["name"],
data={DEVICE_MODEL: self._discovered_device.device.model.value},
title=self.context["title_placeholders"]["name"], data={}
)

self._set_confirm_only()

@@ -164,10 +164,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):

self._discovered_device = discovery

return self.async_create_entry(
title=discovery.name,
data={DEVICE_MODEL: discovery.device.model.value},
)
return self.async_create_entry(title=discovery.name, data={})

current_addresses = self._async_current_ids(include_ignore=False)
devices: list[BluetoothServiceInfoBleak] = []

@@ -1,16 +1,11 @@
"""Constants for Airthings BLE."""

from airthings_ble import AirthingsDeviceType

DOMAIN = "airthings_ble"
MFCT_ID = 820

VOLUME_BECQUEREL = "Bq/m³"
VOLUME_PICOCURIE = "pCi/L"

DEVICE_MODEL = "device_model"

DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}

MAX_RETRIES_AFTER_STARTUP = 5

@@ -16,12 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.unit_system import METRIC_SYSTEM

from .const import (
DEFAULT_SCAN_INTERVAL,
DEVICE_MODEL,
DEVICE_SPECIFIC_SCAN_INTERVAL,
DOMAIN,
)
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -39,18 +34,12 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
self.airthings = AirthingsBluetoothDeviceData(
_LOGGER, hass.config.units is METRIC_SYSTEM
)

device_model = entry.data.get(DEVICE_MODEL)
interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
device_model, DEFAULT_SCAN_INTERVAL
)

super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=timedelta(seconds=interval),
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
)

async def _async_setup(self) -> None:

@@ -69,29 +58,11 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
)
self.ble_device = ble_device

if DEVICE_MODEL not in self.config_entry.data:
_LOGGER.debug("Fetching device info for migration")
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(
f"Unable to fetch data for migration: {err}"
) from err

self.hass.config_entries.async_update_entry(
self.config_entry,
data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
)
self.update_interval = timedelta(
seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
data.model.value, DEFAULT_SCAN_INTERVAL
)
)

async def _async_update_data(self) -> AirthingsDevice:
"""Get data from Airthings BLE."""
try:
data = await self.airthings.update_device(self.ble_device)
except Exception as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err

return data
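One side of the Airthings BLE coordinator hunks looks up a per-model scan interval in DEVICE_SPECIFIC_SCAN_INTERVAL, falling back to DEFAULT_SCAN_INTERVAL, while the other always polls at the default rate. A minimal sketch of that lookup (the model name below is a hypothetical key, not necessarily the library's value):

    from datetime import timedelta

    DEFAULT_SCAN_INTERVAL = 300
    DEVICE_SPECIFIC_SCAN_INTERVAL = {"Corentium Home 2": 1800}  # hypothetical model key

    def update_interval_for(model: str | None) -> timedelta:
        """Return the polling interval for a device model, falling back to the default."""
        return timedelta(seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(model, DEFAULT_SCAN_INTERVAL))

    print(update_interval_for("Corentium Home 2"))  # 0:30:00
    print(update_interval_for(None))                # 0:05:00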
@@ -58,10 +58,7 @@ from homeassistant.const import (
from homeassistant.helpers import network
from homeassistant.util import color as color_util, dt as dt_util
from homeassistant.util.decorator import Registry
from homeassistant.util.unit_conversion import (
TemperatureConverter,
TemperatureDeltaConverter,
)
from homeassistant.util.unit_conversion import TemperatureConverter

from .config import AbstractConfig
from .const import (

@@ -847,7 +844,7 @@ def temperature_from_object(
temp -= 273.15

if interval:
return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
return TemperatureConverter.convert(temp, from_unit, to_unit)
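Both variants in the hunk above convert a temperature difference (an interval) rather than an absolute reading, one via TemperatureDeltaConverter.convert and one via TemperatureConverter.convert_interval. The distinction from a normal conversion is that a delta must only be scaled, never offset. A small illustration of the arithmetic in plain Python, not the Home Assistant helpers:

    def c_to_f_absolute(value: float) -> float:
        """Convert an absolute Celsius reading to Fahrenheit (scale and offset)."""
        return value * 9 / 5 + 32

    def c_to_f_delta(value: float) -> float:
        """Convert a Celsius difference to a Fahrenheit difference (scale only)."""
        return value * 9 / 5

    print(c_to_f_absolute(10.0))  # 50.0
    print(c_to_f_delta(10.0))     # 18.0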
@@ -6,8 +6,8 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Final

from aioamazondevices.const.metadata import SENSOR_STATE_OFF
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF

from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,

@@ -2,13 +2,12 @@

from datetime import timedelta

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.exceptions import (
CannotAuthenticate,
CannotConnect,
CannotRetrieveData,
)
from aioamazondevices.structures import AmazonDevice
from aiohttp import ClientSession

from homeassistant.config_entries import ConfigEntry

@@ -2,10 +2,9 @@

from __future__ import annotations

from dataclasses import asdict
from typing import Any

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME

@@ -61,5 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
"sensors": device.sensors,
}

@@ -1,7 +1,7 @@
"""Defines a base Alexa Devices entity."""

from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SPEAKER_GROUP_MODEL

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==8.0.1"]
"requirements": ["aioamazondevices==6.5.5"]
}

@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any, Final

from aioamazondevices.api import AmazonEchoApi
from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice, AmazonEchoApi
from aioamazondevices.const import SPEAKER_GROUP_FAMILY

from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
from homeassistant.core import HomeAssistant

@@ -7,12 +7,12 @@ from dataclasses import dataclass
from datetime import datetime
from typing import Final

from aioamazondevices.const.schedules import (
from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import (
NOTIFICATION_ALARM,
NOTIFICATION_REMINDER,
NOTIFICATION_TIMER,
)
from aioamazondevices.structures import AmazonDevice

from homeassistant.components.sensor import (
SensorDeviceClass,

@@ -1,6 +1,6 @@
"""Support for services."""

from aioamazondevices.const.sounds import SOUNDS_LIST
from aioamazondevices.sounds import SOUNDS_LIST
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Final

from aioamazondevices.structures import AmazonDevice
from aioamazondevices.api import AmazonDevice

from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,

@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN

@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv

from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
from .services import async_setup_services
from .services import setup_services

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Amber component."""
async_setup_services(hass)
setup_services(hass)
return True

@@ -10,7 +10,6 @@ from homeassistant.core import (
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.selector import ConfigEntrySelector

@@ -103,8 +102,7 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
return results

@callback
def async_setup_services(hass: HomeAssistant) -> None:
def setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Amber integration."""

async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:

@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="daily_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
),
SensorEntityDescription(

@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
key=TYPE_LIGHTNING_PER_DAY,
translation_key="lightning_strikes_per_day",
native_unit_of_measurement="strikes",
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
entity_registry_enabled_default=False,
),
SensorEntityDescription(

@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="monthly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),

@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="weekly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),

@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="yearly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL_INCREASING,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),

@@ -39,11 +39,11 @@ from .const import (
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
DEFAULT_ADB_SERVER_PORT,
DEFAULT_DEVICE_CLASS,
DEFAULT_EXCLUDE_UNNAMED_APPS,
DEFAULT_GET_SOURCES,
DEFAULT_PORT,
DEFAULT_SCREENCAP_INTERVAL,
DEVICE_AUTO,
DEVICE_CLASSES,
DOMAIN,
PROP_ETHMAC,

@@ -89,14 +89,8 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
data_schema = vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(value=k, label=v)
for k, v in DEVICE_CLASSES.items()
],
translation_key="device_class",
)
vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
DEVICE_CLASSES
),
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
},

@@ -15,19 +15,15 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
CONF_TURN_ON_COMMAND = "turn_on_command"

DEFAULT_ADB_SERVER_PORT = 5037
DEFAULT_DEVICE_CLASS = "auto"
DEFAULT_EXCLUDE_UNNAMED_APPS = False
DEFAULT_GET_SOURCES = True
DEFAULT_PORT = 5555
DEFAULT_SCREENCAP_INTERVAL = 5

DEVICE_AUTO = "auto"
DEVICE_ANDROIDTV = "androidtv"
DEVICE_FIRETV = "firetv"
DEVICE_CLASSES = {
DEVICE_AUTO: "auto",
DEVICE_ANDROIDTV: "Android TV",
DEVICE_FIRETV: "Fire TV",
}
DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]

PROP_ETHMAC = "ethmac"
PROP_SERIALNO = "serialno"
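The Android TV hunks above show DEVICE_CLASSES both as a plain list validated with vol.In and as a dict whose items are rendered as dropdown options (stored value plus display label). A small sketch of turning such a dict into option entries; the dict literal here mirrors the diff, but the option shape is a plain-dict stand-in rather than the Home Assistant SelectOptionDict class:

    DEVICE_CLASSES = {
        "auto": "auto",
        "androidtv": "Android TV",
        "firetv": "Fire TV",
    }

    # Each mapping item becomes one dropdown option: the key is stored, the value is shown.
    options = [{"value": key, "label": label} for key, label in DEVICE_CLASSES.items()]
    print(options)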
@@ -65,13 +65,6 @@
}
}
},
"selector": {
"device_class": {
"options": {
"auto": "Auto-detect device type"
}
}
},
"services": {
"adb_command": {
"description": "Sends an ADB command to an Android / Fire TV device.",

@@ -14,11 +14,10 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
from homeassistant.helpers import (
config_entry_oauth2_flow,
device_registry as dr,
issue_registry as ir,
)

from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS

@@ -38,10 +37,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo

session = async_create_august_clientsession(hass)
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
except ValueError as err:
raise ConfigEntryNotReady("OAuth implementation not available") from err
oauth_session = OAuth2Session(hass, entry, implementation)
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
try:
await async_setup_august(hass, entry, august_gateway)

@@ -8,6 +8,6 @@
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
"single_config_entry": true
}

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["mozart-api==5.1.0.247.1"],
"requirements": ["mozart-api==4.1.1.116.4"],
"zeroconf": ["_bangolufsen._tcp.local."]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.3.2"]
"requirements": ["bluecurrent-api==1.3.1"]
}

@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)

current_addresses = self._async_current_ids(include_ignore=False)
current_addresses = self._async_current_ids()
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

@@ -1,7 +1,9 @@
"""The blueprint integration."""

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.typing import ConfigType

from . import websocket_api

@@ -28,4 +30,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the blueprint integration."""
websocket_api.async_setup(hass)
hass.async_create_task(
async_load_platform(hass, Platform.UPDATE, DOMAIN, None, config)
)
return True

@@ -204,8 +204,8 @@ class DomainBlueprints:
self.hass = hass
self.domain = domain
self.logger = logger
self._blueprint_in_use = blueprint_in_use
self._reload_blueprint_consumers = reload_blueprint_consumers
self.blueprint_in_use = blueprint_in_use
self.reload_blueprint_consumers = reload_blueprint_consumers
self._blueprints: dict[str, Blueprint | None] = {}
self._load_lock = asyncio.Lock()
self._blueprint_schema = blueprint_schema

@@ -325,7 +325,7 @@ class DomainBlueprints:

async def async_remove_blueprint(self, blueprint_path: str) -> None:
"""Remove a blueprint file."""
if self._blueprint_in_use(self.hass, blueprint_path):
if self.blueprint_in_use(self.hass, blueprint_path):
raise BlueprintInUse(self.domain, blueprint_path)
path = self.blueprint_folder / blueprint_path
await self.hass.async_add_executor_job(path.unlink)

@@ -362,7 +362,7 @@ class DomainBlueprints:
self._blueprints[blueprint_path] = blueprint

if overrides_existing:
await self._reload_blueprint_consumers(self.hass, blueprint_path)
await self.reload_blueprint_consumers(self.hass, blueprint_path)

return overrides_existing

homeassistant/components/blueprint/update.py (new file, 293 lines)

@@ -0,0 +1,293 @@
"""Update entities for blueprints."""

from __future__ import annotations

import asyncio
from dataclasses import dataclass
import logging
from datetime import timedelta
from typing import Any, Final

from homeassistant.components import automation, script
from . import importer, models
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
from homeassistant.const import CONF_SOURCE_URL
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import event as event_helper
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import DOMAIN as BLUEPRINT_DOMAIN
from .errors import BlueprintException

_LOGGER = logging.getLogger(__name__)

_LATEST_VERSION_PLACEHOLDER: Final = "remote"
DATA_UPDATE_MANAGER: Final = "update_manager"
REFRESH_INTERVAL: Final = timedelta(days=1)

@dataclass(slots=True)
class BlueprintUsage:
"""Details about a blueprint currently in use."""

domain: str
path: str
domain_blueprints: models.DomainBlueprints
blueprint: models.Blueprint
entities: list[str]

async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the blueprint update platform."""
data = hass.data.setdefault(BLUEPRINT_DOMAIN, {})

if (manager := data.get(DATA_UPDATE_MANAGER)) is None:
manager = BlueprintUpdateManager(hass, async_add_entities)
data[DATA_UPDATE_MANAGER] = manager
await manager.async_start()
return

manager.replace_add_entities(async_add_entities)
await manager.async_recreate_entities()

class BlueprintUpdateManager:
"""Manage blueprint update entities based on blueprint usage."""

def __init__(
self, hass: HomeAssistant, async_add_entities: AddEntitiesCallback
) -> None:
"""Initialize the manager."""
self.hass = hass
self._async_add_entities = async_add_entities
self._entities: dict[tuple[str, str], BlueprintUpdateEntity] = {}
self._lock = asyncio.Lock()
self._refresh_cancel: CALLBACK_TYPE | None = None
self._started = False
self._interval_unsub: CALLBACK_TYPE | None = None

async def async_start(self) -> None:
"""Start tracking blueprint usage."""
if self._started:
return
self._started = True

self._interval_unsub = event_helper.async_track_time_interval(
self.hass, self._handle_time_interval, REFRESH_INTERVAL
)
await self.async_refresh_entities()

def replace_add_entities(self, async_add_entities: AddEntitiesCallback) -> None:
"""Update the callback used to register entities."""
self._async_add_entities = async_add_entities

async def async_recreate_entities(self) -> None:
"""Recreate entities after the platform has been reloaded."""
async with self._lock:
entities = list(self._entities.values())
self._entities.clear()

for entity in entities:
await entity.async_remove()

await self.async_refresh_entities()

async def async_refresh_entities(self) -> None:
"""Refresh update entities based on current blueprint usage."""
async with self._lock:
usage_map = await self._async_collect_in_use_blueprints()

current_keys = set(self._entities)
new_keys = set(usage_map)

for key in current_keys - new_keys:
entity = self._entities.pop(key)
await entity.async_remove()

new_entities: list[BlueprintUpdateEntity] = []

for key in new_keys - current_keys:
usage = usage_map[key]
entity = BlueprintUpdateEntity(self, usage)
self._entities[key] = entity
new_entities.append(entity)

for key in new_keys & current_keys:
self._entities[key].update_usage(usage_map[key])
self._entities[key].async_write_ha_state()

if new_entities:
self._async_add_entities(new_entities)

def async_schedule_refresh(self) -> None:
"""Schedule an asynchronous refresh."""
if self._refresh_cancel is not None:
return

self._refresh_cancel = event_helper.async_call_later(
self.hass, 0, self._handle_scheduled_refresh
)

@callback
def _handle_scheduled_refresh(self, _now: Any) -> None:
"""Run a scheduled refresh task."""
self._refresh_cancel = None
self.hass.async_create_task(self.async_refresh_entities())

@callback
def _handle_time_interval(self, _now: Any) -> None:
"""Handle scheduled interval refresh."""
self.async_schedule_refresh()

async def _async_collect_in_use_blueprints(self) -> dict[tuple[str, str], BlueprintUsage]:
"""Collect blueprint usage information for automations and scripts."""

usage_keys: set[tuple[str, str]] = set()

if automation.DATA_COMPONENT in self.hass.data:
component = self.hass.data[automation.DATA_COMPONENT]
for automation_entity in list(component.entities):
if (path := getattr(automation_entity, "referenced_blueprint", None)):
usage_keys.add((automation.DOMAIN, path))

if script.DOMAIN in self.hass.data:
component = self.hass.data[script.DOMAIN]
for script_entity in list(component.entities):
if (path := getattr(script_entity, "referenced_blueprint", None)):
usage_keys.add((script.DOMAIN, path))

domain_blueprints_map = self.hass.data.get(BLUEPRINT_DOMAIN, {})
usage_map: dict[tuple[str, str], BlueprintUsage] = {}

for domain, path in usage_keys:
domain_blueprints: models.DomainBlueprints | None = domain_blueprints_map.get(
domain
)

if domain_blueprints is None:
continue

if not domain_blueprints.blueprint_in_use(self.hass, path):
continue

try:
blueprint = await domain_blueprints.async_get_blueprint(path)
except BlueprintException:
continue

source_url = blueprint.metadata.get(CONF_SOURCE_URL)
if not source_url:
continue

if domain == automation.DOMAIN:
entities = automation.automations_with_blueprint(self.hass, path)
elif domain == script.DOMAIN:
entities = script.scripts_with_blueprint(self.hass, path)
else:
entities = []

usage_map[(domain, path)] = BlueprintUsage(
domain=domain,
path=path,
domain_blueprints=domain_blueprints,
blueprint=blueprint,
entities=entities,
)

return usage_map

class BlueprintUpdateEntity(UpdateEntity):
"""Define a blueprint update entity."""

_attr_entity_category = EntityCategory.CONFIG
_attr_has_entity_name = True
_attr_should_poll = False
_attr_supported_features = UpdateEntityFeature.INSTALL

def __init__(self, manager: BlueprintUpdateManager, usage: BlueprintUsage) -> None:
"""Initialize the update entity."""
self._manager = manager
self._domain = usage.domain
self._path = usage.path
self._domain_blueprints = usage.domain_blueprints
self._blueprint = usage.blueprint
self._entities_in_use = usage.entities
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
self._attr_unique_id = f"{self._domain}:{self._path}"
self._attr_in_progress = False

self.update_usage(usage)

@callback
def update_usage(self, usage: BlueprintUsage) -> None:
"""Update the entity with latest usage information."""
self._domain_blueprints = usage.domain_blueprints
self._blueprint = usage.blueprint
self._entities_in_use = usage.entities
self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)

self._attr_name = usage.blueprint.name
self._attr_release_summary = usage.blueprint.metadata.get("description")
self._attr_installed_version = usage.blueprint.metadata.get("version")
self._attr_release_url = self._source_url
self._attr_available = self._source_url is not None
self._attr_latest_version = (
_LATEST_VERSION_PLACEHOLDER
if self._source_url is not None
else self._attr_installed_version
)

async def async_install(self, version: str | None, backup: bool) -> None:
"""Install (refresh) the blueprint from its source."""
if self._source_url is None:
raise HomeAssistantError("Blueprint does not define a source URL")

self._attr_in_progress = True
self.async_write_ha_state()
usage: BlueprintUsage | None = None

try:
imported = await importer.fetch_blueprint_from_url(
self.hass, self._source_url
)
blueprint = imported.blueprint

if blueprint.domain != self._domain:
raise HomeAssistantError(
"Downloaded blueprint domain does not match the existing blueprint"
)

await self._domain_blueprints.async_add_blueprint(
blueprint, self._path, allow_override=True
)

usage = BlueprintUsage(
domain=self._domain,
path=self._path,
domain_blueprints=self._domain_blueprints,
blueprint=blueprint,
entities=self._entities_in_use,
)

except HomeAssistantError:
raise
except Exception as err:  # noqa: BLE001 - Provide context for unexpected errors
raise HomeAssistantError("Failed to update blueprint from source") from err
finally:
self._attr_in_progress = False

if usage is not None:
self.update_usage(usage)

self.async_write_ha_state()

self._manager.async_schedule_refresh()
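The new blueprint update platform above reconciles its entity map against current blueprint usage with set arithmetic on dict keys: entries only in the old set are removed, entries only in the new set are created, and the intersection is updated in place. A framework-free sketch of that reconcile step (the keys and values are placeholders):

    current = {"a": "entity-a", "b": "entity-b"}  # existing entities by key
    desired = {"b": "usage-b", "c": "usage-c"}    # freshly collected usage by key

    removed = current.keys() - desired.keys()     # {"a"} -> remove these entities
    added = desired.keys() - current.keys()       # {"c"} -> create new entities
    updated = current.keys() & desired.keys()     # {"b"} -> refresh in place

    print(sorted(removed), sorted(added), sorted(updated))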
@@ -20,7 +20,7 @@
|
||||
"bluetooth-adapters==2.1.0",
|
||||
"bluetooth-auto-recovery==1.5.3",
|
||||
"bluetooth-data-tools==1.28.4",
|
||||
"dbus-fast==2.45.0",
|
||||
"dbus-fast==2.44.5",
|
||||
"habluetooth==5.7.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -99,12 +99,6 @@ def deserialize_entity_description(
|
||||
descriptions_class = descriptions_class._dataclass # noqa: SLF001
|
||||
for field in cached_fields(descriptions_class):
|
||||
field_name = field.name
|
||||
# Only set fields that are in the data
|
||||
# otherwise we would override default values with None
|
||||
# causing side effects
|
||||
if field_name not in data:
|
||||
continue
|
||||
|
||||
# It would be nice if field.type returned the actual
|
||||
# type instead of a str so we could avoid writing this
|
||||
# out, but it doesn't. If we end up using this in more
|
||||
|
||||
@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
|
||||
from homeassistant.components.snmp import async_get_snmp_engine
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import (
|
||||
CONF_COMMUNITY,
|
||||
@@ -50,15 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
|
||||
coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
if brother.serial.lower() != entry.unique_id:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="serial_mismatch",
|
||||
translation_placeholders={
|
||||
"device": entry.title,
|
||||
},
|
||||
)
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
@@ -13,7 +13,6 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from homeassistant.util.network import is_host_valid
|
||||
|
||||
@@ -22,7 +21,6 @@ from .const import (
|
||||
DEFAULT_COMMUNITY,
|
||||
DEFAULT_PORT,
|
||||
DOMAIN,
|
||||
PRINTER_TYPE_LASER,
|
||||
PRINTER_TYPES,
|
||||
SECTION_ADVANCED_SETTINGS,
|
||||
)
|
||||
@@ -30,12 +28,7 @@ from .const import (
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
@@ -49,12 +42,7 @@ DATA_SCHEMA = vol.Schema(
|
||||
)
|
||||
ZEROCONF_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=PRINTER_TYPES,
|
||||
translation_key="printer_type",
|
||||
)
|
||||
),
|
||||
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
|
||||
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||
vol.Schema(
|
||||
{
|
||||
|
||||
@@ -7,10 +7,7 @@ from typing import Final
|
||||
|
||||
DOMAIN: Final = "brother"
|
||||
|
||||
PRINTER_TYPE_LASER = "laser"
|
||||
PRINTER_TYPE_INK = "ink"
|
||||
|
||||
PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
|
||||
PRINTER_TYPES: Final = ["laser", "ink"]
|
||||
|
||||
UPDATE_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
"""Define the Brother entity."""
|
||||
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BrotherDataUpdateCoordinator
|
||||
|
||||
|
||||
class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
|
||||
"""Define a Brother Printer entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: BrotherDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{coordinator.brother.host}/",
|
||||
identifiers={(DOMAIN, coordinator.brother.serial)},
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
@@ -19,15 +19,13 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
|
||||
from .entity import BrotherPrinterEntity
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
ATTR_COUNTER = "counter"
|
||||
ATTR_REMAINING_PAGES = "remaining_pages"
|
||||
@@ -332,9 +330,12 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
|
||||
"""Define a Brother Printer sensor."""
|
||||
class BrotherPrinterSensor(
|
||||
CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
|
||||
):
|
||||
"""Define an Brother Printer sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: BrotherSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
@@ -344,7 +345,16 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
configuration_url=f"http://{coordinator.brother.host}/",
|
||||
identifiers={(DOMAIN, coordinator.brother.serial)},
|
||||
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
|
||||
serial_number=coordinator.brother.serial,
|
||||
manufacturer="Brother",
|
||||
model=coordinator.brother.model,
|
||||
name=coordinator.brother.model,
|
||||
sw_version=coordinator.brother.firmware,
|
||||
)
|
||||
self._attr_native_value = description.value(coordinator.data)
|
||||
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
|
||||
self.entity_description = description
|
||||
|
||||
@@ -38,11 +38,11 @@
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"type": "Printer type"
|
||||
"type": "Type of the printer"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of the Brother printer to control.",
|
||||
"type": "The type of the Brother printer."
|
||||
"type": "Brother printer type: ink or laser."
|
||||
},
|
||||
"sections": {
|
||||
"advanced_settings": {
|
||||
@@ -207,19 +207,8 @@
|
||||
"cannot_connect": {
|
||||
"message": "An error occurred while connecting to the {device} printer: {error}"
|
||||
},
|
||||
"serial_mismatch": {
|
||||
"message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
|
||||
},
|
||||
"update_error": {
|
||||
"message": "An error occurred while retrieving data from the {device} printer: {error}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"printer_type": {
|
||||
"options": {
|
||||
"ink": "ink",
|
||||
"laser": "laser"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
|
||||
return HVACAction.HEATING
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="failed_to_parse_hvac_action",
|
||||
translation_key="failed_to_parse_hvac_mode",
|
||||
translation_placeholders={
|
||||
"mode_and_active": mode_and_active,
|
||||
"current_temperature": str(self.current_temperature),
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
"exceptions": {
|
||||
"failed_to_parse_hvac_action": {
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
|
||||
"message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
|
||||
},
|
||||
"failed_to_parse_hvac_mode": {
|
||||
"message": "Cannot parse response to HVACMode: {mode}"
|
||||
|
||||
@@ -63,7 +63,6 @@ BINARY_SENSOR_DESCRIPTIONS = {
|
||||
),
|
||||
BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
|
||||
key=BTHomeBinarySensorDeviceClass.GENERIC,
|
||||
translation_key="generic",
|
||||
),
|
||||
BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
|
||||
key=BTHomeBinarySensorDeviceClass.LIGHT,
|
||||
@@ -160,7 +159,10 @@ def sensor_update_to_bluetooth_data_update(
|
||||
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
|
||||
for device_key, sensor_values in sensor_update.binary_entity_values.items()
|
||||
},
|
||||
entity_names={},
|
||||
entity_names={
|
||||
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
|
||||
for device_key, sensor_values in sensor_update.binary_entity_values.items()
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -59,7 +59,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
|
||||
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="acceleration",
|
||||
),
|
||||
# Battery (percent)
|
||||
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
|
||||
@@ -73,7 +72,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="channel",
|
||||
),
|
||||
# Conductivity (μS/cm)
|
||||
(
|
||||
@@ -89,7 +87,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
|
||||
key=str(BTHomeSensorDeviceClass.COUNT),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="count",
|
||||
),
|
||||
# CO2 (parts per million)
|
||||
(
|
||||
@@ -117,14 +114,12 @@ SENSOR_DESCRIPTIONS = {
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="dew_point",
|
||||
),
|
||||
# Directions (°)
|
||||
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
|
||||
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
|
||||
native_unit_of_measurement=DEGREE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="direction",
|
||||
),
|
||||
# Distance (mm)
|
||||
(
|
||||
@@ -178,7 +173,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
|
||||
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="gyroscope",
|
||||
),
|
||||
# Humidity in (percent)
|
||||
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
|
||||
@@ -221,7 +215,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
translation_key="packet_id",
|
||||
),
|
||||
# PM10 (μg/m3)
|
||||
(
|
||||
@@ -270,14 +263,12 @@ SENSOR_DESCRIPTIONS = {
|
||||
# Raw (-)
|
||||
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.RAW),
|
||||
translation_key="raw",
|
||||
),
|
||||
# Rotation (°)
|
||||
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
|
||||
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
|
||||
native_unit_of_measurement=DEGREE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="rotation",
|
||||
),
|
||||
# Rotational speed (rpm)
|
||||
(
|
||||
@@ -287,7 +278,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
|
||||
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="rotational_speed",
|
||||
),
|
||||
# Signal Strength (RSSI) (dB)
|
||||
(
|
||||
@@ -321,7 +311,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
# Text (-)
|
||||
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
|
||||
translation_key="text",
|
||||
),
|
||||
# Timestamp (datetime object)
|
||||
(
|
||||
@@ -338,7 +327,6 @@ SENSOR_DESCRIPTIONS = {
|
||||
): SensorEntityDescription(
|
||||
key=str(BTHomeSensorDeviceClass.UV_INDEX),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="uv_index",
|
||||
),
|
||||
# Volatile organic Compounds (VOC) (μg/m3)
|
||||
(
|
||||
@@ -435,7 +423,10 @@ def sensor_update_to_bluetooth_data_update(
|
||||
)
|
||||
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||
},
|
||||
entity_names={},
|
||||
entity_names={
|
||||
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
|
||||
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -47,11 +47,6 @@
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"generic": {
|
||||
"name": "Generic"
|
||||
}
|
||||
},
|
||||
"event": {
|
||||
"button": {
|
||||
"state_attributes": {
|
||||
@@ -78,44 +73,6 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"acceleration": {
|
||||
"name": "Acceleration"
|
||||
},
|
||||
"channel": {
|
||||
"name": "Channel"
|
||||
},
|
||||
"count": {
|
||||
"name": "Count"
|
||||
},
|
||||
"dew_point": {
|
||||
"name": "Dew point"
|
||||
},
|
||||
"direction": {
|
||||
"name": "Direction"
|
||||
},
|
||||
"gyroscope": {
|
||||
"name": "Gyroscope"
|
||||
},
|
||||
"packet_id": {
|
||||
"name": "Packet ID"
|
||||
},
|
||||
"raw": {
|
||||
"name": "Raw"
|
||||
},
|
||||
"rotation": {
|
||||
"name": "Rotation"
|
||||
},
|
||||
"rotational_speed": {
|
||||
"name": "Rotational speed"
|
||||
},
|
||||
"text": {
|
||||
"name": "Text"
|
||||
},
|
||||
"uv_index": {
|
||||
"name": "UV Index"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -71,11 +71,8 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
services = await account_link.async_fetch_available_services(
hass.data[DATA_CLOUD]
)
except (aiohttp.ClientError, TimeoutError) as err:
raise config_entry_oauth2_flow.ImplementationUnavailableError(
"Cannot provide OAuth2 implementation for cloud services. "
"Failed to fetch from account link server."
) from err
except (aiohttp.ClientError, TimeoutError):
return []

hass.data[DATA_SERVICES] = services
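The cloud account_link hunk above replaces the silent "return []" on fetch errors with raising config_entry_oauth2_flow.ImplementationUnavailableError, so the failure propagates instead of caching an empty service list. A minimal sketch of the same error-translation pattern, using a hypothetical endpoint URL and a custom retryable exception (both illustrative, not part of the hunk):

import asyncio

import aiohttp


class TemporarilyUnavailableError(Exception):
    """Raised when the account link server cannot be reached right now."""


async def fetch_available_services(session: aiohttp.ClientSession, url: str) -> list[dict]:
    """Fetch a service list, turning transport failures into a retryable error."""
    try:
        async with asyncio.timeout(10):
            resp = await session.get(url)
            resp.raise_for_status()
            return await resp.json()
    except (aiohttp.ClientError, TimeoutError) as err:
        # Raising instead of returning [] lets the caller retry later rather
        # than treating "no services" as a valid, cacheable answer.
        raise TemporarilyUnavailableError("account link server unreachable") from err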
||||
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.10.28"]
}
||||
@@ -6,5 +6,3 @@ DEFAULT_PORT = 10102

CONF_SUPPORTED_MODES = "supported_modes"
CONF_SWING_SUPPORT = "swing_support"
MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2
||||
@@ -2,7 +2,6 @@

from __future__ import annotations

import asyncio
import logging

from pycoolmasternet_async import CoolMasterNet
@@ -13,7 +12,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -47,34 +46,7 @@ class CoolmasterDataUpdateCoordinator(

async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
"""Fetch data from Coolmaster."""
retries_left = MAX_RETRIES
status: dict[str, CoolMasterNetUnit] = {}
while retries_left > 0 and not status:
retries_left -= 1
try:
status = await self._coolmaster.status()
except OSError as error:
if retries_left == 0:
raise UpdateFailed(
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
) from error
_LOGGER.debug(
"Error communicating with coolmaster (%d retries left): %s",
retries_left,
str(error),
)
else:
if status:
return status

_LOGGER.debug(
"Error communicating with coolmaster: empty status received (%d retries left)",
retries_left,
)

backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
await asyncio.sleep(backoff)

raise UpdateFailed(
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
)
try:
return await self._coolmaster.status()
except OSError as error:
raise UpdateFailed from error
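The coordinator hunk above turns a single status call into a bounded retry loop with exponential backoff, reusing the MAX_RETRIES and BACKOFF_BASE_DELAY constants added to const.py. A standalone sketch of that pattern, assuming a hypothetical fetch_status coroutine in place of self._coolmaster.status():

import asyncio
from collections.abc import Awaitable, Callable

MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2


async def fetch_with_backoff(fetch_status: Callable[[], Awaitable[dict]]) -> dict:
    """Retry a flaky fetch, sleeping 2 then 4 seconds between attempts."""
    retries_left = MAX_RETRIES
    status: dict = {}
    while retries_left > 0 and not status:
        retries_left -= 1
        try:
            status = await fetch_status()
        except OSError as error:
            if retries_left == 0:
                raise RuntimeError(
                    f"aborting after {MAX_RETRIES} retries: {error}"
                ) from error
        else:
            if status:
                return status
        # Exponential backoff: BACKOFF_BASE_DELAY ** attempt number.
        await asyncio.sleep(BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left))
    raise RuntimeError(f"aborting after {MAX_RETRIES} retries: empty status received")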
||||
@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
from collections.abc import Mapping
from functools import partial
import logging
from typing import Any

from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
@@ -23,8 +22,6 @@ from .const import DOMAIN, PLATFORMS

type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant, entry: DevoloHomeControlConfigEntry
@@ -47,29 +44,26 @@ async def async_setup_entry(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
)

zeroconf_instance = await zeroconf.async_get_instance(hass)
entry.runtime_data = []
offline_gateways = 0
for gateway_id in gateway_ids:
try:
try:
zeroconf_instance = await zeroconf.async_get_instance(hass)
entry.runtime_data = []
for gateway_id in gateway_ids:
entry.runtime_data.append(
await hass.async_add_executor_job(
partial(
HomeControl,
gateway_id=gateway_id,
gateway_id=str(gateway_id),
mydevolo_instance=mydevolo,
zeroconf_instance=zeroconf_instance,
)
)
)
except GatewayOfflineError:
offline_gateways += 1
_LOGGER.info("Central unit %s cannot be reached locally", gateway_id)
if len(gateway_ids) == offline_gateways:
except GatewayOfflineError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="connection_failed",
)
translation_placeholders={"gateway_id": gateway_id},
) from err

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
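The async_setup_entry hunk above stops aborting on the first unreachable central unit: it counts offline gateways and only raises ConfigEntryNotReady once every gateway was offline. A simplified, framework-free sketch of that bookkeeping, with a hypothetical connect callable standing in for the HomeControl constructor:

import logging
from collections.abc import Callable

_LOGGER = logging.getLogger(__name__)


class GatewayOffline(Exception):
    """Raised by connect() when a central unit cannot be reached locally."""


def connect_all(gateway_ids: list[str], connect: Callable[[str], object]) -> list[object]:
    """Connect to every reachable gateway; fail only when none could be reached."""
    connected: list[object] = []
    offline_gateways = 0
    for gateway_id in gateway_ids:
        try:
            connected.append(connect(gateway_id))
        except GatewayOffline:
            offline_gateways += 1
            _LOGGER.info("Central unit %s cannot be reached locally", gateway_id)
    if gateway_ids and offline_gateways == len(gateway_ids):
        # Equivalent to raising ConfigEntryNotReady in the hunk above.
        raise ConnectionError("no devolo Home Control central unit reachable")
    return connected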
||||
@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["HomeControl", "Mydevolo", "MprmRest", "MprmWebsocket", "Mprm"],
"loggers": ["devolo_home_control_api"],
"requirements": ["devolo-home-control-api==0.19.0"],
"zeroconf": ["_dvl-deviceapi._tcp.local."]
}
||||
@@ -58,7 +58,7 @@
},
"exceptions": {
"connection_failed": {
"message": "Failed to connect to any devolo Home Control central unit."
"message": "Failed to connect to devolo Home Control central unit {gateway_id}."
},
"invalid_auth": {
"message": "Authentication failed. Please re-authenticate with your mydevolo account."
|
||||
@@ -81,9 +81,6 @@
|
||||
"active_map": {
|
||||
"default": "mdi:floor-plan"
|
||||
},
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"water_amount": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
@@ -92,6 +89,9 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"error": {
|
||||
"default": "mdi:alert-circle"
|
||||
},
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
|
||||
}
|
||||
|
||||
@@ -5,9 +5,8 @@ from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
|
||||
from deebot_client.command import CommandWithMessageHandling
|
||||
from deebot_client.device import Device
|
||||
from deebot_client.events import WorkModeEvent, auto_empty
|
||||
from deebot_client.events import WorkModeEvent
|
||||
from deebot_client.events.base import Event
|
||||
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
|
||||
from deebot_client.events.water_info import WaterAmountEvent
|
||||
@@ -35,9 +34,6 @@ class EcovacsSelectEntityDescription[EventT: Event](
|
||||
|
||||
current_option_fn: Callable[[EventT], str | None]
|
||||
options_fn: Callable[[CapabilitySetTypes], list[str]]
|
||||
set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
|
||||
lambda cap, option: cap.set(option)
|
||||
)
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
@@ -62,14 +58,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
|
||||
set_option_fn=lambda cap, option: cap.set(None, option),
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -118,17 +106,14 @@ class EcovacsSelectEntity[EventT: Event](
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def on_event(event: EventT) -> None:
|
||||
if (option := self.entity_description.current_option_fn(event)) is not None:
|
||||
self._attr_current_option = option
|
||||
self.async_write_ha_state()
|
||||
self._attr_current_option = self.entity_description.current_option_fn(event)
|
||||
self.async_write_ha_state()
|
||||
|
||||
self._subscribe(self._capability.event, on_event)
|
||||
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self._device.execute_command(
|
||||
self.entity_description.set_option_fn(self._capability, option)
|
||||
)
|
||||
await self._device.execute_command(self._capability.set(option))
|
||||
|
||||
|
||||
class EcovacsActiveMapSelectEntity(
|
||||
|
||||
@@ -17,6 +17,7 @@ from deebot_client.events import (
|
||||
NetworkInfoEvent,
|
||||
StatsEvent,
|
||||
TotalStatsEvent,
|
||||
auto_empty,
|
||||
station,
|
||||
)
|
||||
from sucks import VacBot
|
||||
@@ -158,6 +159,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(station.State),
|
||||
),
|
||||
EcovacsSensorEntityDescription[auto_empty.AutoEmptyEvent](
|
||||
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
|
||||
value_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
|
||||
key="auto_empty",
|
||||
translation_key="auto_empty",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=get_options(auto_empty.Frequency),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -129,16 +129,6 @@
|
||||
"active_map": {
|
||||
"name": "Active map"
|
||||
},
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"min_10": "10 minutes",
|
||||
"min_15": "15 minutes",
|
||||
"min_25": "25 minutes",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
@@ -159,6 +149,13 @@
|
||||
}
|
||||
},
|
||||
"sensor": {
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"name": "Error",
|
||||
"state_attributes": {
|
||||
|
||||
@@ -151,12 +151,14 @@ ECOWITT_SENSORS_MAPPING: Final = {
|
||||
key="RAIN_COUNT_MM",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=1,
|
||||
),
|
||||
EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
|
||||
key="RAIN_COUNT_INCHES",
|
||||
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
|
||||
device_class=SensorDeviceClass.PRECIPITATION,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
suggested_display_precision=2,
|
||||
),
|
||||
EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(
|
||||
|
||||
@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return await self.async_step_discovered_connection()
|
||||
return await self.async_step_manual_connection()
|
||||
|
||||
current_unique_ids = self._async_current_ids(include_ignore=False)
|
||||
current_unique_ids = self._async_current_ids()
|
||||
current_hosts = {
|
||||
hostname_from_url(entry.data[CONF_HOST])
|
||||
for entry in self._async_current_entries(include_ignore=False)
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/elkm1",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["elkm1_lib"],
|
||||
"requirements": ["elkm1-lib==2.2.12"]
|
||||
"requirements": ["elkm1-lib==2.2.11"]
|
||||
}
|
||||
|
||||
@@ -189,7 +189,9 @@ class ElkPanel(ElkSensor):
|
||||
|
||||
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
|
||||
if self._elk.is_connected():
|
||||
self._attr_native_value = "Paused" if self._elk.is_paused() else "Connected"
|
||||
self._attr_native_value = (
|
||||
"Paused" if self._element.remote_programming_status else "Connected"
|
||||
)
|
||||
else:
|
||||
self._attr_native_value = "Disconnected"
|
||||
|
||||
|
||||
homeassistant/components/enmax/__init__.py (new file)
@@ -0,0 +1 @@
"""Virtual integration: Enmax Energy."""

homeassistant/components/enmax/manifest.json (new file)
@@ -0,0 +1,6 @@
{
"domain": "enmax",
"name": "Enmax Energy",
"integration_type": "virtual",
"supported_by": "opower"
}
||||
@@ -2,9 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from aioesphomeapi import APIClient, APIConnectionError
|
||||
from aioesphomeapi import APIClient
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.components.bluetooth import async_remove_scanner
|
||||
@@ -22,12 +20,9 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from . import assist_satellite, dashboard, ffmpeg_proxy
|
||||
from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DOMAIN
|
||||
from .domain_data import DomainData
|
||||
from .encryption_key_storage import async_get_encryption_key_storage
|
||||
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
|
||||
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
CLIENT_INFO = f"Home Assistant {ha_version}"
|
||||
@@ -80,12 +75,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:
|
||||
"""Unload an esphome config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(
|
||||
entry, entry.runtime_data.loaded_platforms
|
||||
entry_data = await cleanup_instance(entry)
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
entry, entry_data.loaded_platforms
|
||||
)
|
||||
if unload_ok:
|
||||
await cleanup_instance(entry)
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
|
||||
@@ -96,57 +89,3 @@ async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) ->
|
||||
hass, DOMAIN, DEVICE_CONFLICT_ISSUE_FORMAT.format(entry.entry_id)
|
||||
)
|
||||
await DomainData.get(hass).get_or_create_store(hass, entry).async_remove()
|
||||
|
||||
await _async_clear_dynamic_encryption_key(hass, entry)
|
||||
|
||||
|
||||
async def _async_clear_dynamic_encryption_key(
|
||||
hass: HomeAssistant, entry: ESPHomeConfigEntry
|
||||
) -> None:
|
||||
"""Clear the dynamic encryption key on the device and from storage."""
|
||||
if entry.unique_id is None or entry.data.get(CONF_NOISE_PSK) is None:
|
||||
return
|
||||
|
||||
# Only clear the key if it's stored in our storage, meaning it was
|
||||
# dynamically generated by us and not user-provided
|
||||
storage = await async_get_encryption_key_storage(hass)
|
||||
if await storage.async_get_key(entry.unique_id) is None:
|
||||
return
|
||||
|
||||
host: str = entry.data[CONF_HOST]
|
||||
port: int = entry.data[CONF_PORT]
|
||||
password: str | None = entry.data[CONF_PASSWORD]
|
||||
noise_psk: str | None = entry.data.get(CONF_NOISE_PSK)
|
||||
|
||||
zeroconf_instance = await zeroconf.async_get_instance(hass)
|
||||
|
||||
cli = APIClient(
|
||||
host,
|
||||
port,
|
||||
password,
|
||||
client_info=CLIENT_INFO,
|
||||
zeroconf_instance=zeroconf_instance,
|
||||
noise_psk=noise_psk,
|
||||
timezone=hass.config.time_zone,
|
||||
)
|
||||
|
||||
try:
|
||||
await cli.connect()
|
||||
# Clear the encryption key on the device by passing an empty key
|
||||
if not await cli.noise_encryption_set_key(b""):
|
||||
_LOGGER.debug(
|
||||
"Could not clear dynamic encryption key for ESPHome device %s: Device rejected key removal",
|
||||
entry.unique_id,
|
||||
)
|
||||
return
|
||||
except APIConnectionError as exc:
|
||||
_LOGGER.debug(
|
||||
"Could not connect to ESPHome device %s to clear dynamic encryption key: %s",
|
||||
entry.unique_id,
|
||||
exc,
|
||||
)
|
||||
return
|
||||
finally:
|
||||
await cli.disconnect()
|
||||
|
||||
await storage.async_remove_key(entry.unique_id)
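The first ESPHome hunk above reorders async_unload_entry so cleanup_instance only runs once the platforms were actually unloaded, and async_remove_entry additionally clears a dynamically generated encryption key on a best-effort basis. A minimal sketch of the unload ordering, with hypothetical unload_platforms and cleanup callables standing in for the Home Assistant calls:

from collections.abc import Awaitable, Callable


async def unload_entry(
    unload_platforms: Callable[[], Awaitable[bool]],
    cleanup: Callable[[], Awaitable[None]],
) -> bool:
    """Unload platforms first; tear down shared state only if that succeeded."""
    unload_ok = await unload_platforms()
    if unload_ok:
        # Cleaning up unconditionally could leave a half-unloaded entry without
        # the runtime data its still-loaded platforms rely on.
        await cleanup()
    return unload_ok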
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.7.0",
|
||||
"aioesphomeapi==42.5.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -77,7 +77,7 @@ class EufyLifeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data={CONF_MODEL: model},
|
||||
)
|
||||
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
current_addresses = self._async_current_ids()
|
||||
for discovery_info in async_discovered_service_info(self.hass, False):
|
||||
address = discovery_info.address
|
||||
if (
|
||||
|
||||
@@ -11,7 +11,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .services import async_setup_services
from .services import async_register_services

PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]

@@ -20,7 +20,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the file component."""
async_setup_services(hass)
async_register_services(hass)
return True
|
||||
@@ -6,29 +6,29 @@ import json
import voluptuous as vol
import yaml

from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv

from .const import ATTR_FILE_ENCODING, ATTR_FILE_NAME, DOMAIN, SERVICE_READ_FILE


@callback
def async_setup_services(hass: HomeAssistant) -> None:
def async_register_services(hass: HomeAssistant) -> None:
"""Register services for File integration."""

hass.services.async_register(
DOMAIN,
SERVICE_READ_FILE,
read_file,
schema=vol.Schema(
{
vol.Required(ATTR_FILE_NAME): cv.string,
vol.Required(ATTR_FILE_ENCODING): cv.string,
}
),
supports_response=SupportsResponse.ONLY,
)
if not hass.services.has_service(DOMAIN, SERVICE_READ_FILE):
hass.services.async_register(
DOMAIN,
SERVICE_READ_FILE,
read_file,
schema=vol.Schema(
{
vol.Required(ATTR_FILE_NAME): cv.string,
vol.Required(ATTR_FILE_ENCODING): cv.string,
}
),
supports_response=SupportsResponse.ONLY,
)


ENCODING_LOADERS: dict[str, tuple[Callable, type[Exception]]] = {
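The services.py hunk above renames the setup helper and guards registration behind hass.services.has_service, so calling it twice no longer re-registers read_file. A small self-contained sketch of that idempotency guard, using a toy registry instead of the Home Assistant service API:

from collections.abc import Callable


class ServiceRegistry:
    """Tiny stand-in for hass.services, just enough to show the guard."""

    def __init__(self) -> None:
        self._services: dict[tuple[str, str], Callable] = {}

    def has_service(self, domain: str, service: str) -> bool:
        return (domain, service) in self._services

    def async_register(self, domain: str, service: str, handler: Callable) -> None:
        self._services[(domain, service)] = handler


def register_services(registry: ServiceRegistry) -> None:
    """Register the read_file handler only once, even if setup runs again."""
    if not registry.has_service("file", "read_file"):
        registry.async_register("file", "read_file", lambda call: {"text": ""})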
|
||||
@@ -129,51 +129,6 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconf_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input:
|
||||
try:
|
||||
await _validate_input(
|
||||
self.hass,
|
||||
data={
|
||||
**reconf_entry.data,
|
||||
**user_input,
|
||||
},
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except FireflyClientTimeout:
|
||||
errors["base"] = "timeout_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
|
||||
return self.async_update_reload_and_abort(
|
||||
reconf_entry,
|
||||
data_updates={
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
suggested_values=user_input or reconf_entry.data.copy(),
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -21,20 +20,6 @@
|
||||
},
|
||||
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_key": "[%key:component::firefly_iii::config::step::user::data_description::api_key%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"verify_ssl": "[%key:component::firefly_iii::config::step::user::data_description::verify_ssl%]"
|
||||
},
|
||||
"description": "Use the following form to reconfigure your Firefly III instance.",
|
||||
"title": "Reconfigure Firefly III Integration"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]",
|
||||
|
||||
@@ -37,7 +37,6 @@ class FoscamDeviceInfo:
|
||||
supports_speak_volume_adjustment: bool
|
||||
supports_pet_adjustment: bool
|
||||
supports_car_adjustment: bool
|
||||
supports_human_adjustment: bool
|
||||
supports_wdr_adjustment: bool
|
||||
supports_hdr_adjustment: bool
|
||||
|
||||
@@ -145,32 +144,24 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
human_adjustment_val = (
|
||||
bool(int(software_capabilities.get("swCapabilities2")) & 128)
|
||||
if ret_sw == 0
|
||||
else False
|
||||
)
|
||||
ret_md, motion_config_val = self.session.get_motion_detect_config()
|
||||
ret_md, mothion_config_val = self.session.get_motion_detect_config()
|
||||
if pet_adjustment_val:
|
||||
is_pet_detection_on_val = (
|
||||
motion_config_val.get("petEnable") == "1" if ret_md == 0 else False
|
||||
mothion_config_val["petEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_pet_detection_on_val = False
|
||||
|
||||
if car_adjustment_val:
|
||||
is_car_detection_on_val = (
|
||||
motion_config_val.get("carEnable") == "1" if ret_md == 0 else False
|
||||
mothion_config_val["carEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_car_detection_on_val = False
|
||||
|
||||
if human_adjustment_val:
|
||||
is_human_detection_on_val = (
|
||||
motion_config_val.get("humanEnable") == "1" if ret_md == 0 else False
|
||||
)
|
||||
else:
|
||||
is_human_detection_on_val = False
|
||||
is_human_detection_on_val = (
|
||||
mothion_config_val["humanEnable"] == "1" if ret_md == 0 else False
|
||||
)
|
||||
|
||||
return FoscamDeviceInfo(
|
||||
dev_info=dev_info,
|
||||
@@ -188,7 +179,6 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
|
||||
supports_speak_volume_adjustment=supports_speak_volume_adjustment_val,
|
||||
supports_pet_adjustment=pet_adjustment_val,
|
||||
supports_car_adjustment=car_adjustment_val,
|
||||
supports_human_adjustment=human_adjustment_val,
|
||||
supports_hdr_adjustment=supports_hdr_adjustment_val,
|
||||
supports_wdr_adjustment=supports_wdr_adjustment_val,
|
||||
is_open_wdr=is_open_wdr,
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/foscam",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["libpyfoscamcgi"],
|
||||
"requirements": ["libpyfoscamcgi==0.0.9"]
|
||||
"requirements": ["libpyfoscamcgi==0.0.8"]
|
||||
}
|
||||
|
||||
@@ -143,7 +143,6 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
|
||||
native_value_fn=lambda data: data.is_human_detection_on,
|
||||
turn_off_fn=lambda session: set_motion_detection(session, "humanEnable", False),
|
||||
turn_on_fn=lambda session: set_motion_detection(session, "humanEnable", True),
|
||||
exists_fn=lambda coordinator: coordinator.data.supports_human_adjustment,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -826,21 +826,6 @@ class AvmWrapper(FritzBoxTools):
|
||||
NewDisallow="0" if turn_on else "1",
|
||||
)
|
||||
|
||||
async def async_get_current_user_rights(self) -> dict[str, Any]:
|
||||
"""Call X_AVM-DE_GetCurrentUser service."""
|
||||
|
||||
result = await self._async_service_call(
|
||||
"LANConfigSecurity",
|
||||
"1",
|
||||
"X_AVM-DE_GetCurrentUser",
|
||||
)
|
||||
|
||||
user_rights = xmltodict.parse(result["NewX_AVM-DE_CurrentUserRights"])["rights"]
|
||||
|
||||
return {
|
||||
k: user_rights["access"][idx] for idx, k in enumerate(user_rights["path"])
|
||||
}
|
||||
|
||||
async def async_wake_on_lan(self, mac_address: str) -> dict[str, Any]:
|
||||
"""Call X_AVM-DE_WakeOnLANByMACAddress service."""
|
||||
|
||||
|
||||
@@ -35,7 +35,6 @@ async def async_get_config_entry_diagnostics(
|
||||
"last_update success": avm_wrapper.last_update_success,
|
||||
"last_exception": avm_wrapper.last_exception,
|
||||
"discovered_services": list(avm_wrapper.connection.services),
|
||||
"current_user_rights": await avm_wrapper.async_get_current_user_rights(),
|
||||
"client_devices": [
|
||||
{
|
||||
"connected_to": device.connected_to,
|
||||
|
||||
@@ -263,9 +263,6 @@ class Panel:
|
||||
# Title to show in the sidebar
|
||||
sidebar_title: str | None = None
|
||||
|
||||
# If the panel should be visible by default in the sidebar
|
||||
sidebar_default_visible: bool = True
|
||||
|
||||
# Url to show the panel in the frontend
|
||||
frontend_url_path: str
|
||||
|
||||
@@ -283,7 +280,6 @@ class Panel:
|
||||
component_name: str,
|
||||
sidebar_title: str | None,
|
||||
sidebar_icon: str | None,
|
||||
sidebar_default_visible: bool,
|
||||
frontend_url_path: str | None,
|
||||
config: dict[str, Any] | None,
|
||||
require_admin: bool,
|
||||
@@ -297,7 +293,6 @@ class Panel:
|
||||
self.config = config
|
||||
self.require_admin = require_admin
|
||||
self.config_panel_domain = config_panel_domain
|
||||
self.sidebar_default_visible = sidebar_default_visible
|
||||
|
||||
@callback
|
||||
def to_response(self) -> PanelResponse:
|
||||
@@ -306,7 +301,6 @@ class Panel:
|
||||
"component_name": self.component_name,
|
||||
"icon": self.sidebar_icon,
|
||||
"title": self.sidebar_title,
|
||||
"default_visible": self.sidebar_default_visible,
|
||||
"config": self.config,
|
||||
"url_path": self.frontend_url_path,
|
||||
"require_admin": self.require_admin,
|
||||
@@ -321,7 +315,6 @@ def async_register_built_in_panel(
|
||||
component_name: str,
|
||||
sidebar_title: str | None = None,
|
||||
sidebar_icon: str | None = None,
|
||||
sidebar_default_visible: bool = True,
|
||||
frontend_url_path: str | None = None,
|
||||
config: dict[str, Any] | None = None,
|
||||
require_admin: bool = False,
|
||||
@@ -334,7 +327,6 @@ def async_register_built_in_panel(
|
||||
component_name,
|
||||
sidebar_title,
|
||||
sidebar_icon,
|
||||
sidebar_default_visible,
|
||||
frontend_url_path,
|
||||
config,
|
||||
require_admin,
|
||||
@@ -461,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
hass.http.app.router.register_resource(IndexView(repo_path, hass))
|
||||
|
||||
async_register_built_in_panel(hass, "light")
|
||||
async_register_built_in_panel(hass, "security")
|
||||
async_register_built_in_panel(hass, "safety")
|
||||
async_register_built_in_panel(hass, "climate")
|
||||
|
||||
async_register_built_in_panel(hass, "profile")
|
||||
@@ -887,7 +879,6 @@ class PanelResponse(TypedDict):
|
||||
component_name: str
|
||||
icon: str | None
|
||||
title: str | None
|
||||
default_visible: bool
|
||||
config: dict[str, Any] | None
|
||||
url_path: str
|
||||
require_admin: bool
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251105.0"]
|
||||
"requirements": ["home-assistant-frontend==20251029.1"]
|
||||
}
|
||||
|
||||
@@ -43,9 +43,6 @@ from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class GiosSensorEntityDescription(SensorEntityDescription):
|
||||
|
||||
@@ -14,10 +14,6 @@
|
||||
"name": "[%key:common::config_flow::data::name%]",
|
||||
"station_id": "Measuring station"
|
||||
},
|
||||
"data_description": {
|
||||
"name": "Config entry name, by default, this is the name of your Home Assistant instance.",
|
||||
"station_id": "The name of the measuring station where the environmental data is collected."
|
||||
},
|
||||
"title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,21 +136,6 @@ async def async_setup_entry(
|
||||
new_data[CONF_URL] = url
|
||||
hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
|
||||
# Migrate legacy config entries without auth_type field
|
||||
if CONF_AUTH_TYPE not in config:
|
||||
new_data = dict(config_entry.data)
|
||||
# Detect auth type based on which fields are present
|
||||
if CONF_TOKEN in config:
|
||||
new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
|
||||
elif CONF_USERNAME in config:
|
||||
new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
|
||||
else:
|
||||
raise ConfigEntryError(
|
||||
"Unable to determine authentication type from config entry."
|
||||
)
|
||||
hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
config = config_entry.data
|
||||
|
||||
# Determine API version
|
||||
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
|
||||
api_version = "v1"
|
||||
|
||||
@@ -3,9 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import itertools
|
||||
|
||||
from aiohasupervisor.models.mounts import MountState
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -16,14 +13,8 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import (
|
||||
ADDONS_COORDINATOR,
|
||||
ATTR_STARTED,
|
||||
ATTR_STATE,
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_MOUNTS,
|
||||
)
|
||||
from .entity import HassioAddonEntity, HassioMountEntity
|
||||
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
|
||||
from .entity import HassioAddonEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -43,16 +34,6 @@ ADDON_ENTITY_DESCRIPTIONS = (
|
||||
),
|
||||
)
|
||||
|
||||
MOUNT_ENTITY_DESCRIPTIONS = (
|
||||
HassioBinarySensorEntityDescription(
|
||||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_registry_enabled_default=False,
|
||||
key=ATTR_STATE,
|
||||
translation_key="mount",
|
||||
target=MountState.ACTIVE.value,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -63,26 +44,13 @@ async def async_setup_entry(
|
||||
coordinator = hass.data[ADDONS_COORDINATOR]
|
||||
|
||||
async_add_entities(
|
||||
itertools.chain(
|
||||
[
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
[
|
||||
HassioMountBinarySensor(
|
||||
mount=mount,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for mount in coordinator.data[DATA_KEY_MOUNTS].values()
|
||||
for entity_description in MOUNT_ENTITY_DESCRIPTIONS
|
||||
],
|
||||
HassioAddonBinarySensor(
|
||||
addon=addon,
|
||||
coordinator=coordinator,
|
||||
entity_description=entity_description,
|
||||
)
|
||||
for addon in coordinator.data[DATA_KEY_ADDONS].values()
|
||||
for entity_description in ADDON_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
@@ -100,20 +68,3 @@ class HassioAddonBinarySensor(HassioAddonEntity, BinarySensorEntity):
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
|
||||
class HassioMountBinarySensor(HassioMountEntity, BinarySensorEntity):
|
||||
"""Binary sensor for Hass.io mount."""
|
||||
|
||||
entity_description: HassioBinarySensorEntityDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
value = getattr(
|
||||
self.coordinator.data[DATA_KEY_MOUNTS][self._mount.name],
|
||||
self.entity_description.key,
|
||||
)
|
||||
if self.entity_description.target is None:
|
||||
return value
|
||||
return value == self.entity_description.target
|
||||
|
||||
@@ -110,7 +110,6 @@ DATA_KEY_SUPERVISOR = "supervisor"
|
||||
DATA_KEY_CORE = "core"
|
||||
DATA_KEY_HOST = "host"
|
||||
DATA_KEY_SUPERVISOR_ISSUES = "supervisor_issues"
|
||||
DATA_KEY_MOUNTS = "mounts"
|
||||
|
||||
PLACEHOLDER_KEY_ADDON = "addon"
|
||||
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
|
||||
@@ -175,4 +174,3 @@ class SupervisorEntityModel(StrEnum):
|
||||
CORE = "Home Assistant Core"
|
||||
SUPERVISOR = "Home Assistant Supervisor"
|
||||
HOST = "Home Assistant Host"
|
||||
MOUNT = "Home Assistant Mount"
|
||||
|
||||
@@ -10,7 +10,6 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
|
||||
from aiohasupervisor.models import StoreInfo
|
||||
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
|
||||
@@ -42,7 +41,6 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DATA_KEY_SUPERVISOR_ISSUES,
|
||||
@@ -205,25 +203,6 @@ def async_register_addons_in_dev_reg(
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_mounts_in_dev_reg(
|
||||
entry_id: str,
|
||||
dev_reg: dr.DeviceRegistry,
|
||||
mounts: list[CIFSMountResponse | NFSMountResponse],
|
||||
) -> None:
|
||||
"""Register mounts in the device registry."""
|
||||
for mount in mounts:
|
||||
params = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")},
|
||||
manufacturer="Home Assistant",
|
||||
model=SupervisorEntityModel.MOUNT,
|
||||
model_id=f"{mount.usage}/{mount.type}",
|
||||
name=mount.name,
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_os_in_dev_reg(
|
||||
entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
|
||||
@@ -293,12 +272,12 @@ def async_register_supervisor_in_dev_reg(
|
||||
|
||||
|
||||
@callback
|
||||
def async_remove_devices_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, devices: set[str]
|
||||
def async_remove_addons_from_dev_reg(
|
||||
dev_reg: dr.DeviceRegistry, addons: set[str]
|
||||
) -> None:
|
||||
"""Remove devices from the device registry."""
|
||||
for device in devices:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, device)}):
|
||||
"""Remove addons from the device registry."""
|
||||
for addon_slug in addons:
|
||||
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, addon_slug)}):
|
||||
dev_reg.async_remove_device(dev.id)
|
||||
|
||||
|
||||
@@ -349,7 +328,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
addons_info = get_addons_info(self.hass) or {}
|
||||
addons_stats = get_addons_stats(self.hass)
|
||||
store_data = get_store(self.hass)
|
||||
mounts_info = await self.supervisor_client.mounts.info()
|
||||
|
||||
if store_data:
|
||||
repositories = {
|
||||
@@ -384,16 +362,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
**get_supervisor_stats(self.hass),
|
||||
}
|
||||
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
|
||||
new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
|
||||
|
||||
# If this is the initial refresh, register all addons and return the dict
|
||||
if is_first_update:
|
||||
async_register_addons_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
|
||||
)
|
||||
async_register_mounts_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
|
||||
)
|
||||
async_register_core_in_dev_reg(
|
||||
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
|
||||
)
|
||||
@@ -415,20 +389,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
if device.model == SupervisorEntityModel.ADDON
|
||||
}
|
||||
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
|
||||
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
|
||||
|
||||
# Remove mounts that no longer exists from device registry
|
||||
supervisor_mount_devices = {
|
||||
device.name
|
||||
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
|
||||
self.entry_id
|
||||
)
|
||||
if device.model == SupervisorEntityModel.MOUNT
|
||||
}
|
||||
if stale_mounts := supervisor_mount_devices - set(new_data[DATA_KEY_MOUNTS]):
|
||||
async_remove_devices_from_dev_reg(
|
||||
self.dev_reg, {f"mount_{stale_mount}" for stale_mount in stale_mounts}
|
||||
)
|
||||
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
|
||||
|
||||
if not self.is_hass_os and (
|
||||
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
|
||||
@@ -436,12 +397,11 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
# Remove the OS device if it exists and the installation is not hassos
|
||||
self.dev_reg.async_remove_device(dev.id)
|
||||
|
||||
# If there are new add-ons or mounts, we should reload the config entry so we can
|
||||
# If there are new add-ons, we should reload the config entry so we can
|
||||
# create new devices and entities. We can return an empty dict because
|
||||
# coordinator will be recreated.
|
||||
if self.data and (
|
||||
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
|
||||
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
|
||||
if self.data and set(new_data[DATA_KEY_ADDONS]) - set(
|
||||
self.data[DATA_KEY_ADDONS]
|
||||
):
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(self.entry_id)
|
||||
|
||||
@@ -4,8 +4,6 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
@@ -17,7 +15,6 @@ from .const import (
|
||||
DATA_KEY_ADDONS,
|
||||
DATA_KEY_CORE,
|
||||
DATA_KEY_HOST,
|
||||
DATA_KEY_MOUNTS,
|
||||
DATA_KEY_OS,
|
||||
DATA_KEY_SUPERVISOR,
|
||||
DOMAIN,
|
||||
@@ -195,34 +192,3 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
)
|
||||
if CONTAINER_STATS in update_types:
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
|
||||
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
|
||||
"""Base Entity for Mount."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HassioDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
mount: CIFSMountResponse | NFSMountResponse,
|
||||
) -> None:
|
||||
"""Initialize base entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = (
|
||||
f"home_assistant_mount_{mount.name}_{entity_description.key}"
|
||||
)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, f"mount_{mount.name}")}
|
||||
)
|
||||
self._mount = mount
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self._mount.name in self.coordinator.data[DATA_KEY_MOUNTS]
|
||||
)
|
||||
|
||||
@@ -44,6 +44,7 @@ from .const import (
|
||||
EVENT_SUPPORTED_CHANGED,
|
||||
EXTRA_PLACEHOLDERS,
|
||||
ISSUE_KEY_ADDON_BOOT_FAIL,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
|
||||
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
@@ -86,6 +87,7 @@ ISSUE_KEYS_FOR_REPAIRS = {
|
||||
"issue_system_disk_lifetime",
|
||||
ISSUE_KEY_SYSTEM_FREE_SPACE,
|
||||
ISSUE_KEY_ADDON_PWNED,
|
||||
ISSUE_KEY_ADDON_DEPRECATED,
|
||||
}
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"mount": {
|
||||
"name": "Connected"
|
||||
},
|
||||
"state": {
|
||||
"name": "Running"
|
||||
}
|
||||
|
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/holiday",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["holidays==0.84", "babel==2.15.0"]
|
||||
"requirements": ["holidays==0.83", "babel==2.15.0"]
|
||||
}
|
||||
|
||||
@@ -12,11 +12,10 @@ import jwt
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
ImplementationUnavailableError,
|
||||
OAuth2Session,
|
||||
async_get_config_entry_implementation,
|
||||
from homeassistant.helpers import (
|
||||
config_entry_oauth2_flow,
|
||||
config_validation as cv,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -49,15 +48,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: HomeConnectConfigEntry) -> bool:
|
||||
"""Set up Home Connect from a config entry."""
|
||||
try:
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
except ImplementationUnavailableError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="oauth2_implementation_unavailable",
|
||||
) from err
|
||||
implementation = (
|
||||
await config_entry_oauth2_flow.async_get_config_entry_implementation(
|
||||
hass, entry
|
||||
)
|
||||
)
|
||||
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
|
||||
|
||||
config_entry_auth = AsyncConfigEntryAuth(hass, session)
|
||||
try:
|
||||
|
||||
@@ -1236,9 +1236,6 @@
|
||||
"fetch_api_error": {
|
||||
"message": "Error obtaining data from the API: {error}"
|
||||
},
|
||||
"oauth2_implementation_unavailable": {
|
||||
"message": "OAuth2 implementation temporarily unavailable, will retry"
|
||||
},
|
||||
"pause_program": {
|
||||
"message": "Error pausing program: {error}"
|
||||
},
|
||||
|
||||
@@ -39,8 +39,6 @@ from .const import (
|
||||
NABU_CASA_FIRMWARE_RELEASES_URL,
|
||||
PID,
|
||||
PRODUCT,
|
||||
RADIO_TX_POWER_DBM_BY_COUNTRY,
|
||||
RADIO_TX_POWER_DBM_DEFAULT,
|
||||
SERIAL_NUMBER,
|
||||
VID,
|
||||
)
|
||||
@@ -77,7 +75,6 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
|
||||
context: ConfigFlowContext
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -105,21 +102,6 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
next_step_id="finish_thread_installation",
|
||||
)
|
||||
|
||||
def _extra_zha_hardware_options(self) -> dict[str, Any]:
|
||||
"""Return extra ZHA hardware options."""
|
||||
country = self.hass.config.country
|
||||
|
||||
if country is None:
|
||||
tx_power = RADIO_TX_POWER_DBM_DEFAULT
|
||||
else:
|
||||
tx_power = RADIO_TX_POWER_DBM_BY_COUNTRY.get(
|
||||
country, RADIO_TX_POWER_DBM_DEFAULT
|
||||
)
|
||||
|
||||
return {
|
||||
"tx_power": tx_power,
|
||||
}
|
||||
|
||||
|
||||
class HomeAssistantConnectZBT2ConfigFlow(
|
||||
ZBT2FirmwareMixin,
|
||||
@@ -130,6 +112,7 @@ class HomeAssistantConnectZBT2ConfigFlow(
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 1
|
||||
ZIGBEE_BAUDRATE = 460800
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Initialize the config flow."""
|
||||
@@ -213,14 +196,14 @@ class HomeAssistantConnectZBT2OptionsFlowHandler(
|
||||
"""Instantiate options flow."""
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self._usb_info = get_usb_service_info(self._config_entry)
|
||||
self._usb_info = get_usb_service_info(self.config_entry)
|
||||
self._hardware_name = HARDWARE_NAME
|
||||
self._device = self._usb_info.device
|
||||
|
||||
self._probed_firmware_info = FirmwareInfo(
|
||||
device=self._device,
|
||||
firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
|
||||
firmware_version=self._config_entry.data[FIRMWARE_VERSION],
|
||||
firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]),
|
||||
firmware_version=self.config_entry.data[FIRMWARE_VERSION],
|
||||
source="guess",
|
||||
owners=[],
|
||||
)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Constants for the Home Assistant Connect ZBT-2 integration."""
|
||||
|
||||
from homeassistant.generated.countries import COUNTRIES
|
||||
|
||||
DOMAIN = "homeassistant_connect_zbt2"
|
||||
|
||||
NABU_CASA_FIRMWARE_RELEASES_URL = (
|
||||
@@ -19,59 +17,3 @@ VID = "vid"
|
||||
DEVICE = "device"
|
||||
|
||||
HARDWARE_NAME = "Home Assistant Connect ZBT-2"
|
||||
|
||||
RADIO_TX_POWER_DBM_DEFAULT = 8
|
||||
RADIO_TX_POWER_DBM_BY_COUNTRY = {
|
||||
# EU Member States
|
||||
"AT": 10,
|
||||
"BE": 10,
|
||||
"BG": 10,
|
||||
"HR": 10,
|
||||
"CY": 10,
|
||||
"CZ": 10,
|
||||
"DK": 10,
|
||||
"EE": 10,
|
||||
"FI": 10,
|
||||
"FR": 10,
|
||||
"DE": 10,
|
||||
"GR": 10,
|
||||
"HU": 10,
|
||||
"IE": 10,
|
||||
"IT": 10,
|
||||
"LV": 10,
|
||||
"LT": 10,
|
||||
"LU": 10,
|
||||
"MT": 10,
|
||||
"NL": 10,
|
||||
"PL": 10,
|
||||
"PT": 10,
|
||||
"RO": 10,
|
||||
"SK": 10,
|
||||
"SI": 10,
|
||||
"ES": 10,
|
||||
"SE": 10,
|
||||
# EEA Members
|
||||
"IS": 10,
|
||||
"LI": 10,
|
||||
"NO": 10,
|
||||
# Standards harmonized with RED or ETSI
|
||||
"CH": 10,
|
||||
"GB": 10,
|
||||
"TR": 10,
|
||||
"AL": 10,
|
||||
"BA": 10,
|
||||
"GE": 10,
|
||||
"MD": 10,
|
||||
"ME": 10,
|
||||
"MK": 10,
|
||||
"RS": 10,
|
||||
"UA": 10,
|
||||
# Other CEPT nations
|
||||
"AD": 10,
|
||||
"AZ": 10,
|
||||
"MC": 10,
|
||||
"SM": 10,
|
||||
"VA": 10,
|
||||
}
|
||||
|
||||
assert set(RADIO_TX_POWER_DBM_BY_COUNTRY) <= COUNTRIES
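The homeassistant_connect_zbt2 hunks above pair a per-country radio TX power table with a default and select from it with dict.get when the configured country is known. A short sketch of that lookup; the table here is trimmed to a few entries and only illustrates the 10 dBm / 8 dBm split shown in the hunk:

RADIO_TX_POWER_DBM_DEFAULT = 8
RADIO_TX_POWER_DBM_BY_COUNTRY = {"DE": 10, "FR": 10, "NO": 10}


def tx_power_for(country: str | None) -> int:
    """Pick the regulatory TX power for a country, falling back to the default."""
    if country is None:
        return RADIO_TX_POWER_DBM_DEFAULT
    return RADIO_TX_POWER_DBM_BY_COUNTRY.get(country, RADIO_TX_POWER_DBM_DEFAULT)


assert tx_power_for("DE") == 10
assert tx_power_for("US") == 8
assert tx_power_for(None) == 8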
|
||||
|
||||
@@ -28,7 +28,7 @@ from homeassistant.config_entries import (
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.data_entry_flow import AbortFlow, progress_step
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
@@ -97,12 +97,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
self.addon_uninstall_task: asyncio.Task | None = None
|
||||
self.firmware_install_task: asyncio.Task[None] | None = None
|
||||
self.installing_firmware_name: str | None = None
|
||||
self._install_otbr_addon_task: asyncio.Task[None] | None = None
|
||||
self._start_otbr_addon_task: asyncio.Task[None] | None = None
|
||||
|
||||
# Progress flow steps cannot abort so we need to store the abort reason and then
|
||||
# re-raise it in a dedicated step
|
||||
self._progress_error: AbortFlow | None = None
|
||||
|
||||
def _get_translation_placeholders(self) -> dict[str, str]:
|
||||
"""Shared translation placeholders."""
|
||||
@@ -112,11 +106,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
if self._probed_firmware_info is not None
|
||||
else "unknown"
|
||||
),
|
||||
"firmware_name": (
|
||||
self.installing_firmware_name
|
||||
if self.installing_firmware_name is not None
|
||||
else "unknown"
|
||||
),
|
||||
"model": self._hardware_name,
|
||||
}
|
||||
|
||||
@@ -193,22 +182,22 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
return self.async_show_progress(
|
||||
step_id=step_id,
|
||||
progress_action="install_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": firmware_name,
|
||||
},
|
||||
progress_task=self.firmware_install_task,
|
||||
)
|
||||
|
||||
try:
|
||||
await self.firmware_install_task
|
||||
except AbortFlow as err:
|
||||
self._progress_error = err
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
return self.async_show_progress_done(
|
||||
next_step_id=err.reason,
|
||||
)
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception("Failed to flash firmware")
|
||||
self._progress_error = AbortFlow(
|
||||
reason="fw_install_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
return self.async_show_progress_done(next_step_id="progress_failed")
|
||||
return self.async_show_progress_done(next_step_id="firmware_install_failed")
|
||||
finally:
|
||||
self.firmware_install_task = None
|
||||
|
||||
@@ -252,10 +241,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
|
||||
return
|
||||
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
|
||||
if not firmware_install_required:
|
||||
assert self._probed_firmware_info is not None
|
||||
@@ -284,10 +270,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
return
|
||||
|
||||
# Otherwise, fail
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
raise AbortFlow(reason="firmware_download_failed") from err
|
||||
|
||||
self._probed_firmware_info = await async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
@@ -330,6 +313,41 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
|
||||
await otbr_manager.async_start_addon_waiting()
|
||||
|
||||
async def async_step_firmware_download_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when firmware download failed."""
|
||||
assert self.installing_firmware_name is not None
|
||||
return self.async_abort(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": self.installing_firmware_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_firmware_install_failed(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when firmware install failed."""
|
||||
assert self.installing_firmware_name is not None
|
||||
return self.async_abort(
|
||||
reason="fw_install_failed",
|
||||
description_placeholders={
|
||||
**self._get_translation_placeholders(),
|
||||
"firmware_name": self.installing_firmware_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_unsupported_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Abort when unsupported firmware is detected."""
|
||||
return self.async_abort(
|
||||
reason="unsupported_firmware",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
)
|
||||
|
||||
async def async_step_zigbee_installation_type(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -438,10 +456,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
# This step is necessary to prevent `user_input` from being passed through
|
||||
return await self.async_step_continue_zigbee()
|
||||
|
||||
def _extra_zha_hardware_options(self) -> dict[str, Any]:
|
||||
"""Return extra ZHA hardware options."""
|
||||
return {}
|
||||
|
||||
async def async_step_continue_zigbee(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -464,7 +478,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
},
|
||||
"radio_type": "ezsp",
|
||||
"flow_strategy": self._zigbee_flow_strategy,
|
||||
**self._extra_zha_hardware_options(),
|
||||
},
|
||||
)
|
||||
return self._continue_zha_flow(result)
|
||||
@@ -493,15 +506,16 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
        """Install Thread firmware."""
        raise NotImplementedError

    async def async_step_progress_failed(
    @progress_step(
        description_placeholders=lambda self: {
            **self._get_translation_placeholders(),
            "addon_name": get_otbr_addon_manager(self.hass).addon_name,
        }
    )
    async def async_step_install_otbr_addon(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Abort when progress step failed."""
        assert self._progress_error is not None
        raise self._progress_error

    async def _async_install_otbr_addon(self) -> None:
        """Do the work of installing the OTBR addon."""
        """Show progress dialog for installing the OTBR addon."""
        addon_manager = get_otbr_addon_manager(self.hass)
        addon_info = await self._async_get_addon_info(addon_manager)
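Here the hand-written `async_step_progress_failed` plumbing gives way to a `@progress_step(...)` decorator whose definition and import path are not part of this excerpt. Taking the hunk at face value, the decorated step keeps only the actual work and the hand-off to the next step. A sketch of the hypothetical `async_step_do_install` from the previous example rewritten in that style, assuming `progress_step` wraps the create-task/show-progress/show-progress-done bookkeeping and accepts a `description_placeholders` callable as shown above:

    # Continuing the ExampleFirmwareFlow sketch above; progress_step is assumed
    # to be provided elsewhere in the component and is not defined in this diff.
    @progress_step(
        description_placeholders=lambda self: {
            "firmware_name": self.installing_firmware_name or "unknown"
        }
    )
    async def async_step_do_install(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Run the install job; the decorator renders the progress dialog."""
        await self._install()
        return await self.async_step_install_done()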
@@ -519,39 +533,18 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
                },
            ) from err

    async def async_step_install_otbr_addon(
        return await self.async_step_finish_thread_installation()

    @progress_step(
        description_placeholders=lambda self: {
            **self._get_translation_placeholders(),
            "addon_name": get_otbr_addon_manager(self.hass).addon_name,
        }
    )
    async def async_step_start_otbr_addon(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show progress dialog for installing the OTBR addon."""
        if self._install_otbr_addon_task is None:
            self._install_otbr_addon_task = self.hass.async_create_task(
                self._async_install_otbr_addon(),
                "Install OTBR addon",
            )

        if not self._install_otbr_addon_task.done():
            return self.async_show_progress(
                step_id="install_otbr_addon",
                progress_action="install_otbr_addon",
                description_placeholders={
                    **self._get_translation_placeholders(),
                    "addon_name": get_otbr_addon_manager(self.hass).addon_name,
                },
                progress_task=self._install_otbr_addon_task,
            )

        try:
            await self._install_otbr_addon_task
        except AbortFlow as err:
            self._progress_error = err
            return self.async_show_progress_done(next_step_id="progress_failed")
        finally:
            self._install_otbr_addon_task = None

        return self.async_show_progress_done(next_step_id="finish_thread_installation")

    async def _async_start_otbr_addon(self) -> None:
        """Do the work of starting the OTBR addon."""
        """Configure OTBR to point to the SkyConnect and run the addon."""
        try:
            await self._configure_and_start_otbr_addon()
        except AddonError as err:
@@ -564,36 +557,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
                },
            ) from err

    async def async_step_start_otbr_addon(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Configure OTBR to point to the SkyConnect and run the addon."""
        if self._start_otbr_addon_task is None:
            self._start_otbr_addon_task = self.hass.async_create_task(
                self._async_start_otbr_addon(),
                "Start OTBR addon",
            )

        if not self._start_otbr_addon_task.done():
            return self.async_show_progress(
                step_id="start_otbr_addon",
                progress_action="start_otbr_addon",
                description_placeholders={
                    **self._get_translation_placeholders(),
                    "addon_name": get_otbr_addon_manager(self.hass).addon_name,
                },
                progress_task=self._start_otbr_addon_task,
            )

        try:
            await self._start_otbr_addon_task
        except AbortFlow as err:
            self._progress_error = err
            return self.async_show_progress_done(next_step_id="progress_failed")
        finally:
            self._start_otbr_addon_task = None

        return self.async_show_progress_done(next_step_id="pre_confirm_otbr")
        return await self.async_step_pre_confirm_otbr()

    async def async_step_pre_confirm_otbr(
        self, user_input: dict[str, Any] | None = None

@@ -275,17 +275,17 @@ class HomeAssistantSkyConnectOptionsFlowHandler(
        """Instantiate options flow."""
        super().__init__(*args, **kwargs)

        self._usb_info = get_usb_service_info(self._config_entry)
        self._usb_info = get_usb_service_info(self.config_entry)
        self._hw_variant = HardwareVariant.from_usb_product_name(
            self._config_entry.data[PRODUCT]
            self.config_entry.data[PRODUCT]
        )
        self._hardware_name = self._hw_variant.full_name
        self._device = self._usb_info.device

        self._probed_firmware_info = FirmwareInfo(
            device=self._device,
            firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
            firmware_version=self._config_entry.data[FIRMWARE_VERSION],
            firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]),
            firmware_version=self.config_entry.data[FIRMWARE_VERSION],
            source="guess",
            owners=[],
        )
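The SkyConnect options-flow hunk drops the privately stored `self._config_entry` in favour of the `config_entry` attribute that Home Assistant's `OptionsFlow` base class provides, as shown by the replacement lines above. A minimal sketch of an options flow reading entry data that way; `ExampleOptionsFlow`, the `product` key and the abort reason are illustrative, only the `self.config_entry` access mirrors the hunk:

from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlow(OptionsFlow):
    """Sketch: read config entry data via the built-in config_entry attribute."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Read hardware details from the entry instead of a stashed copy."""
        # self.config_entry is populated by the options-flow machinery, so the
        # handler no longer needs to keep its own _config_entry reference.
        product = self.config_entry.data.get("product", "unknown hardware")
        return self.async_abort(
            reason="not_implemented",
            description_placeholders={"product": product},
        )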