mirror of https://github.com/home-assistant/core.git
synced 2025-12-19 22:38:02 +00:00

Compare commits: matter_tes...dev
152 Commits
Commit SHA1s:
065b0eb5b2, 6a1d86d5db, f99a73ef28, 0436d30062, 24b6b5452b, 8b91ebfe30, 37d3b73c1b, c881d9809e,
85dfe3a107, d8a468833e, 5bbd56b8e6, d0411b6613, 293fbebef2, cfe542acb9, 8da323d4b7, b2edf637cc,
de61a45de1, d9324cb0e4, 4a464f601c, 43e9c24c18, 1c3492b4c2, e0cb56a38c, 6e05cc4898, 6f9dc2e5a2,
ddb1ae371d, 6553337b79, aedc729d57, 31fa69b609, b819a866b9, 6cc7d83def, 5154418051, 7e63c12b95,
d17e951591, 9198e5f56d, 97d7e0e01e, 4d5b8c4b08, abb011311e, 92cf7623fa, aedf4c881b, 74baf44c83,
9afb4a9eb8, e1967bef9a, f17b6aa9e4, dd6d7397d9, aeabd2d2cc, d7af2f39c2, a674ad11bc, ccb64d7fd8,
36691e2a3d, 8971f75f13, 173db170af, 881851a4f6, 4b4b64e939, e721c1a092, 0933c9fe51, 632b3e5dc3,
434cb48344, 86d4c3cbbf, 3019f9041c, 9e0a3dee08, fefe7d9e5d, 4c382cedff, 6ffd05313b, 9be0214021,
54300430b7, a25038259e, 81be14c8f1, 62464b83dc, beb909528c, ef28715360, 78cc41fdc0, 6a868ca5cc,
f43dead38c, 86163252e1, 0cd5202596, 33dcde7de1, c449b2e2e8, f40f7072c8, 4163ecd833, 9c59d528af,
c2440c4ebd, cb275f65ba, b1923df3ca, 7ddfd155ca, e01df6d10d, 54010728d5, 62a3b3827f, b9abfba20f,
eca9f36e55, 3c865c6f41, 3b32c4bcbf, fcdc1cfed9, 0fd782c4ab, bbcaf69973, f2b713acac, 6c944d6b15,
4dd3abb16a, d2672b9ddf, ff30492919, b5ccdf8165, b3c745cfa7, 67aeafa797, 3d71b6de44, 5349045932,
4960871c84, af3861cd6b, f9a070e9b3, fd503b2e33, e5a73fcf57, 6991e01489, c8636ee6f3, 52229dc5a8,
f013455843, cae5bca546, 49299b06c6, 8e39027ad5, 2a1ce2df61, 7a6d929150, 6f4a112dbb, 2197b910fb,
7e2a9cd7f9, e7ed7a8ed2, 9ba2d0defe, 231300919c, 664c50586f, 43b9ecfc2b, f1237ed52a, ecf8f55cc4,
ff36693057, 005785997c, 9917b82b66, 9c927406ac, 972d95602a, 5e0549a18f, bcbb159fb2, 0123ca656a,
1f699c729c, 50c3fcfeba, 2af1e098cc, c418d9750b, e96d614076, f0a5e0a023, 6ac6b86060, 3909171b1a,
769029505f, 080ec3524b, 48d671ad5f, 7115db5d22, d0c8792e4b, 84d7c37502, 8a10638470, 10dd53ffc2
.github/copilot-instructions.md: 3 changes (vendored)

@@ -51,6 +51,9 @@ rules:
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review

## Python Requirements

- **Compatibility**: Python 3.13+
.github/workflows/builder.yml: 2 changes (vendored)

@@ -551,7 +551,7 @@ jobs:
      - name: Generate artifact attestation
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
        uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
        with:
          subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/codeql.yml: 4 changes (vendored)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

      - name: Initialize CodeQL
        uses: github/codeql-action/init@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
        uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
        uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          category: "/language:python"
@@ -567,6 +567,7 @@ homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
homeassistant.components.waqi.*
homeassistant.components.water_heater.*
homeassistant.components.watts.*
homeassistant.components.watttime.*
homeassistant.components.weather.*
homeassistant.components.webhook.*
CODEOWNERS: 10 changes (generated)

@@ -664,8 +664,8 @@ build.json @home-assistant/supervisor
/tests/components/heos/ @andrewsayre
/homeassistant/components/here_travel_time/ @eifinger
/tests/components/here_travel_time/ @eifinger
/homeassistant/components/hikvision/ @mezz64
/tests/components/hikvision/ @mezz64
/homeassistant/components/hikvision/ @mezz64 @ptarjan
/tests/components/hikvision/ @mezz64 @ptarjan
/homeassistant/components/hikvisioncam/ @fbradyirl
/homeassistant/components/hisense_aehw4a1/ @bannhead
/tests/components/hisense_aehw4a1/ @bannhead
@@ -1195,8 +1195,8 @@ build.json @home-assistant/supervisor
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl
/tests/components/overkiz/ @imicknl
/homeassistant/components/overseerr/ @joostlek
/tests/components/overseerr/ @joostlek
/homeassistant/components/overseerr/ @joostlek @AmGarera
/tests/components/overseerr/ @joostlek @AmGarera
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1798,6 +1798,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
/homeassistant/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/tests/components/watts/ @theobld-ww @devender-verma-ww @ssi-spyro
/homeassistant/components/watttime/ @bachya
/tests/components/watttime/ @bachya
/homeassistant/components/waze_travel_time/ @eifinger
Dockerfile: 2 changes (generated)

@@ -24,7 +24,7 @@ ENV \
COPY rootfs /

# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
COPY --from=ghcr.io/alexxit/go2rtc@sha256:f394f6329f5389a4c9a7fc54b09fdec9621bbb78bf7a672b973440bbdfb02241 /usr/local/bin/go2rtc /bin/go2rtc

RUN \
# Verify go2rtc can be executed
@@ -402,6 +402,8 @@ class AuthManager:
        if user.is_owner:
            raise ValueError("Unable to deactivate the owner")
        await self._store.async_deactivate_user(user)
        for refresh_token in list(user.refresh_tokens.values()):
            self.async_remove_refresh_token(refresh_token)

    async def async_remove_credentials(self, credentials: models.Credentials) -> None:
        """Remove credentials."""
@@ -624,13 +624,16 @@ async def async_enable_logging(

    if log_file is None:
        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
        if "SUPERVISOR" in os.environ:
            _LOGGER.info("Running in Supervisor, not logging to file")
        if "SUPERVISOR" in os.environ and "HA_DUPLICATE_LOG_FILE" not in os.environ:
            # Rename the default log file if it exists, since previous versions created
            # it even on Supervisor
            if os.path.isfile(default_log_path):
                with contextlib.suppress(OSError):
                    os.rename(default_log_path, f"{default_log_path}.old")

            def rename_old_file() -> None:
                """Rename old log file in executor."""
                if os.path.isfile(default_log_path):
                    with contextlib.suppress(OSError):
                        os.rename(default_log_path, f"{default_log_path}.old")

            await hass.async_add_executor_job(rename_old_file)
            err_log_path = None
        else:
            err_log_path = default_log_path
@@ -18,7 +18,7 @@ from .coordinator import (
    ActronAirSystemCoordinator,
)

PLATFORM = [Platform.CLIMATE]
PLATFORMS = [Platform.CLIMATE, Platform.SWITCH]


async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
@@ -50,10 +50,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) ->
        system_coordinators=system_coordinators,
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORM)
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORM)
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -148,7 +148,7 @@ class ActronSystemClimate(BaseClimateEntity):
    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        fan_mode = self._status.user_aircon_settings.fan_mode
        fan_mode = self._status.user_aircon_settings.base_fan_mode
        return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode)

    @property
homeassistant/components/actron_air/icons.json: 30 lines (new file)

@@ -0,0 +1,30 @@
{
  "entity": {
    "switch": {
      "away_mode": {
        "default": "mdi:home-export-outline",
        "state": {
          "off": "mdi:home-import-outline"
        }
      },
      "continuous_fan": {
        "default": "mdi:fan",
        "state": {
          "off": "mdi:fan-off"
        }
      },
      "quiet_mode": {
        "default": "mdi:volume-low",
        "state": {
          "off": "mdi:volume-high"
        }
      },
      "turbo_mode": {
        "default": "mdi:fan-plus",
        "state": {
          "off": "mdi:fan"
        }
      }
    }
  }
}
@@ -13,5 +13,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["actron-neo-api==0.2.0"]
  "requirements": ["actron-neo-api==0.4.1"]
}
@@ -32,6 +32,22 @@
      }
    }
  },
  "entity": {
    "switch": {
      "away_mode": {
        "name": "Away mode"
      },
      "continuous_fan": {
        "name": "Continuous fan"
      },
      "quiet_mode": {
        "name": "Quiet mode"
      },
      "turbo_mode": {
        "name": "Turbo mode"
      }
    }
  },
  "exceptions": {
    "auth_error": {
      "message": "Authentication failed, please reauthenticate"
homeassistant/components/actron_air/switch.py: 110 lines (new file)

@@ -0,0 +1,110 @@
"""Switch platform for Actron Air integration."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class ActronAirSwitchEntityDescription(SwitchEntityDescription):
    """Class describing Actron Air switch entities."""

    is_on_fn: Callable[[ActronAirSystemCoordinator], bool]
    set_fn: Callable[[ActronAirSystemCoordinator, bool], Awaitable[None]]
    is_supported_fn: Callable[[ActronAirSystemCoordinator], bool] = lambda _: True


SWITCHES: tuple[ActronAirSwitchEntityDescription, ...] = (
    ActronAirSwitchEntityDescription(
        key="away_mode",
        translation_key="away_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.away_mode,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_away_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="continuous_fan",
        translation_key="continuous_fan",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.continuous_fan_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_continuous_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="quiet_mode",
        translation_key="quiet_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.quiet_mode_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_quiet_mode(enabled),
    ),
    ActronAirSwitchEntityDescription(
        key="turbo_mode",
        translation_key="turbo_mode",
        is_on_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_enabled,
        set_fn=lambda coordinator,
        enabled: coordinator.data.user_aircon_settings.set_turbo_mode(enabled),
        is_supported_fn=lambda coordinator: coordinator.data.user_aircon_settings.turbo_supported,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ActronAirConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Actron Air switch entities."""
    system_coordinators = entry.runtime_data.system_coordinators
    async_add_entities(
        ActronAirSwitch(coordinator, description)
        for coordinator in system_coordinators.values()
        for description in SWITCHES
        if description.is_supported_fn(coordinator)
    )


class ActronAirSwitch(CoordinatorEntity[ActronAirSystemCoordinator], SwitchEntity):
    """Actron Air switch."""

    _attr_has_entity_name = True
    _attr_entity_category = EntityCategory.CONFIG
    entity_description: ActronAirSwitchEntityDescription

    def __init__(
        self,
        coordinator: ActronAirSystemCoordinator,
        description: ActronAirSwitchEntityDescription,
    ) -> None:
        """Initialize the switch."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.serial_number}_{description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.serial_number)},
            manufacturer="Actron Air",
            name=coordinator.data.ac_system.system_name,
        )

    @property
    def is_on(self) -> bool:
        """Return true if the switch is on."""
        return self.entity_description.is_on_fn(self.coordinator)

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self.entity_description.set_fn(self.coordinator, True)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self.entity_description.set_fn(self.coordinator, False)
@@ -88,21 +88,11 @@ class AirPatrolClimate(AirPatrolEntity, ClimateEntity):
        super().__init__(coordinator, unit_id)
        self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{unit_id}"

    @property
    def climate_data(self) -> dict[str, Any]:
        """Return the climate data."""
        return self.device_data.get("climate") or {}

    @property
    def params(self) -> dict[str, Any]:
        """Return the current parameters for the climate entity."""
        return self.climate_data.get("ParametersData") or {}

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and bool(self.climate_data)

    @property
    def current_humidity(self) -> float | None:
        """Return the current humidity."""
@@ -10,7 +10,7 @@ from homeassistant.const import Platform
DOMAIN = "airpatrol"

LOGGER = logging.getLogger(__package__)
PLATFORMS = [Platform.CLIMATE]
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=1)

AIRPATROL_ERRORS = (AirPatrolAuthenticationError, AirPatrolError)
@@ -38,7 +38,17 @@ class AirPatrolEntity(CoordinatorEntity[AirPatrolDataUpdateCoordinator]):
        """Return the device data."""
        return self.coordinator.data[self._unit_id]

    @property
    def climate_data(self) -> dict[str, Any]:
        """Return the climate data for this unit."""
        return self.device_data["climate"]

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and self._unit_id in self.coordinator.data
        return (
            super().available
            and self._unit_id in self.coordinator.data
            and "climate" in self.device_data
            and self.climate_data is not None
        )
homeassistant/components/airpatrol/sensor.py: 89 lines (new file)

@@ -0,0 +1,89 @@
"""Sensors for AirPatrol integration."""

from __future__ import annotations

from dataclasses import dataclass

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import PERCENTAGE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AirPatrolConfigEntry
from .coordinator import AirPatrolDataUpdateCoordinator
from .entity import AirPatrolEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirPatrolSensorEntityDescription(SensorEntityDescription):
    """Describes AirPatrol sensor entity."""

    data_field: str


SENSOR_DESCRIPTIONS = (
    AirPatrolSensorEntityDescription(
        key="temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        data_field="RoomTemp",
    ),
    AirPatrolSensorEntityDescription(
        key="humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
        data_field="RoomHumidity",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AirPatrolConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up AirPatrol sensors."""
    coordinator = config_entry.runtime_data
    units = coordinator.data

    async_add_entities(
        AirPatrolSensor(coordinator, unit_id, description)
        for unit_id, unit in units.items()
        for description in SENSOR_DESCRIPTIONS
        if "climate" in unit and unit["climate"] is not None
    )


class AirPatrolSensor(AirPatrolEntity, SensorEntity):
    """AirPatrol sensor entity."""

    entity_description: AirPatrolSensorEntityDescription

    def __init__(
        self,
        coordinator: AirPatrolDataUpdateCoordinator,
        unit_id: str,
        description: AirPatrolSensorEntityDescription,
    ) -> None:
        """Initialize AirPatrol sensor."""
        super().__init__(coordinator, unit_id)
        self.entity_description = description
        self._attr_unique_id = (
            f"{coordinator.config_entry.unique_id}-{unit_id}-{description.key}"
        )

    @property
    def native_value(self) -> float | None:
        """Return the state of the sensor."""
        if value := self.climate_data.get(self.entity_description.data_field):
            return float(value)
        return None
@@ -4,10 +4,10 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_supported_features
from homeassistant.helpers.trigger import (
    EntityStateTriggerBase,
    EntityTargetStateTriggerBase,
    Trigger,
    make_conditional_entity_state_trigger,
    make_entity_state_trigger,
    make_entity_target_state_trigger,
    make_entity_transition_trigger,
)

from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
@@ -21,7 +21,7 @@ def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool
    return False


class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
    """Trigger for entity state changes."""

    _required_features: int
@@ -38,21 +38,21 @@ class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):

def make_entity_state_trigger_required_features(
    domain: str, to_state: str, required_features: int
) -> type[EntityStateTriggerBase]:
) -> type[EntityTargetStateTriggerBase]:
    """Create an entity state trigger class."""

    class CustomTrigger(EntityStateTriggerRequiredFeatures):
        """Trigger for entity state changes."""

        _domain = domain
        _to_state = to_state
        _to_states = {to_state}
        _required_features = required_features

    return CustomTrigger


TRIGGERS: dict[str, type[Trigger]] = {
    "armed": make_conditional_entity_state_trigger(
    "armed": make_entity_transition_trigger(
        DOMAIN,
        from_states={
            AlarmControlPanelState.ARMING,
@@ -89,8 +89,12 @@ TRIGGERS: dict[str, type[Trigger]] = {
        AlarmControlPanelState.ARMED_VACATION,
        AlarmControlPanelEntityFeature.ARM_VACATION,
    ),
    "disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
    "triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
    "disarmed": make_entity_target_state_trigger(
        DOMAIN, AlarmControlPanelState.DISARMED
    ),
    "triggered": make_entity_target_state_trigger(
        DOMAIN, AlarmControlPanelState.TRIGGERED
    ),
}
@@ -4,13 +4,28 @@ from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from pyanglianwater import AnglianWater
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError

from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
    StatisticData,
    StatisticMeanType,
    StatisticMetaData,
)
from homeassistant.components.recorder.statistics import (
    async_add_external_statistics,
    get_last_statistics,
    statistics_during_period,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import VolumeConverter

from .const import CONF_ACCOUNT_NUMBER, DOMAIN

@@ -44,6 +59,107 @@ class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
    async def _async_update_data(self) -> None:
        """Update data from Anglian Water's API."""
        try:
            return await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
            await self.api.update(self.config_entry.data[CONF_ACCOUNT_NUMBER])
            await self._insert_statistics()
        except (ExpiredAccessTokenError, UnknownEndpointError) as err:
            raise UpdateFailed from err

    async def _insert_statistics(self) -> None:
        """Insert statistics for water meters into Home Assistant."""
        for meter in self.api.meters.values():
            id_prefix = (
                f"{self.config_entry.data[CONF_ACCOUNT_NUMBER]}_{meter.serial_number}"
            )
            usage_statistic_id = f"{DOMAIN}:{id_prefix}_usage".lower()
            _LOGGER.debug("Updating statistics for meter %s", meter.serial_number)
            name_prefix = (
                f"Anglian Water {self.config_entry.data[CONF_ACCOUNT_NUMBER]} "
                f"{meter.serial_number}"
            )
            usage_metadata = StatisticMetaData(
                mean_type=StatisticMeanType.NONE,
                has_sum=True,
                name=f"{name_prefix} Usage",
                source=DOMAIN,
                statistic_id=usage_statistic_id,
                unit_class=VolumeConverter.UNIT_CLASS,
                unit_of_measurement=UnitOfVolume.CUBIC_METERS,
            )
            last_stat = await get_instance(self.hass).async_add_executor_job(
                get_last_statistics, self.hass, 1, usage_statistic_id, True, set()
            )
            if not last_stat:
                _LOGGER.debug("Updating statistics for the first time")
                usage_sum = 0.0
                last_stats_time = None
            else:
                if not meter.readings or len(meter.readings) == 0:
                    _LOGGER.debug("No recent usage statistics found, skipping update")
                    continue
                # Anglian Water stats are hourly, the read_at time is the time that the meter took the reading
                # We remove 1 hour from this so that the data is shown in the correct hour on the dashboards
                parsed_read_at = dt_util.parse_datetime(meter.readings[0]["read_at"])
                if not parsed_read_at:
                    _LOGGER.debug(
                        "Could not parse read_at time %s, skipping update",
                        meter.readings[0]["read_at"],
                    )
                    continue
                start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
                _LOGGER.debug("Getting statistics at %s", start)
                for end in (start + timedelta(seconds=1), None):
                    stats = await get_instance(self.hass).async_add_executor_job(
                        statistics_during_period,
                        self.hass,
                        start,
                        end,
                        {
                            usage_statistic_id,
                        },
                        "hour",
                        None,
                        {"sum"},
                    )
                    if stats:
                        break
                    if end:
                        _LOGGER.debug(
                            "Not found, trying to find oldest statistic after %s",
                            start,
                        )
                assert stats

                def _safe_get_sum(records: list[Any]) -> float:
                    if records and "sum" in records[0]:
                        return float(records[0]["sum"])
                    return 0.0

                usage_sum = _safe_get_sum(stats.get(usage_statistic_id, []))
                last_stats_time = stats[usage_statistic_id][0]["start"]

            usage_statistics = []

            for read in meter.readings:
                parsed_read_at = dt_util.parse_datetime(read["read_at"])
                if not parsed_read_at:
                    _LOGGER.debug(
                        "Could not parse read_at time %s, skipping reading",
                        read["read_at"],
                    )
                    continue
                start = dt_util.as_local(parsed_read_at) - timedelta(hours=1)
                if last_stats_time is not None and start.timestamp() <= last_stats_time:
                    continue
                usage_state = max(0, read["consumption"] / 1000)
                usage_sum = max(0, read["read"])
                usage_statistics.append(
                    StatisticData(
                        start=start,
                        state=usage_state,
                        sum=usage_sum,
                    )
                )
            _LOGGER.debug(
                "Adding %s statistics for %s", len(usage_statistics), usage_statistic_id
            )
            async_add_external_statistics(self.hass, usage_metadata, usage_statistics)
@@ -1,6 +1,7 @@
{
  "domain": "anglian_water",
  "name": "Anglian Water",
  "after_dependencies": ["recorder"],
  "codeowners": ["@pantherale0"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/anglian_water",
@@ -1,16 +1,22 @@
"""Provides triggers for assist satellites."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger

from .const import DOMAIN
from .entity import AssistSatelliteState

TRIGGERS: dict[str, type[Trigger]] = {
    "idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
    "processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
    "responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
    "idle": make_entity_target_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.LISTENING
    ),
    "processing": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.PROCESSING
    ),
    "responding": make_entity_target_state_trigger(
        DOMAIN, AssistSatelliteState.RESPONDING
    ),
}
@@ -6,5 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/autarco",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["autarco==3.2.0"]
}
@@ -27,6 +27,7 @@ from homeassistant.const import (
    CONF_EVENT_DATA,
    CONF_ID,
    CONF_MODE,
    CONF_OPTIONS,
    CONF_PATH,
    CONF_PLATFORM,
    CONF_TRIGGERS,
@@ -130,9 +131,12 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
    "cover",
    "device_tracker",
    "fan",
    "humidifier",
    "lawn_mower",
    "light",
    "lock",
    "media_player",
    "siren",
    "switch",
    "text",
    "update",
@@ -1214,7 +1218,7 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
        return trigger_conf[CONF_ENTITY_ID]  # type: ignore[no-any-return]

    if trigger_conf[CONF_PLATFORM] == "calendar":
        return [trigger_conf[CONF_ENTITY_ID]]
        return [trigger_conf[CONF_OPTIONS][CONF_ENTITY_ID]]

    if trigger_conf[CONF_PLATFORM] == "zone":
        return trigger_conf[CONF_ENTITY_ID] + [trigger_conf[CONF_ZONE]]  # type: ignore[no-any-return]
@@ -4,7 +4,7 @@ from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_device_class
from homeassistant.helpers.trigger import EntityStateTriggerBase, Trigger
from homeassistant.helpers.trigger import EntityTargetStateTriggerBase, Trigger
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from . import DOMAIN, BinarySensorDeviceClass
@@ -20,7 +20,7 @@ def get_device_class_or_undefined(
    return UNDEFINED


class BinarySensorOnOffTrigger(EntityStateTriggerBase):
class BinarySensorOnOffTrigger(EntityTargetStateTriggerBase):
    """Class for binary sensor on/off triggers."""

    _device_class: BinarySensorDeviceClass | None
@@ -47,7 +47,7 @@ def make_binary_sensor_trigger(
        """Trigger for entity state changes."""

        _device_class = device_class
        _to_state = to_state
        _to_states = {to_state}

    return CustomTrigger
@@ -2,6 +2,7 @@
  "domain": "blackbird",
  "name": "Monoprice Blackbird Matrix Switch",
  "codeowners": [],
  "disabled": "This integration is disabled because it references pyserial-asyncio, which does blocking I/O in the asyncio loop and is not maintained.",
  "documentation": "https://www.home-assistant.io/integrations/blackbird",
  "iot_class": "local_polling",
  "loggers": ["pyblackbird"],
@@ -2,29 +2,30 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable, Coroutine
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import datetime
import logging
from typing import Any
from typing import TYPE_CHECKING, Any, cast

import voluptuous as vol

from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_PLATFORM
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import (
    async_track_point_in_time,
    async_track_time_interval,
)
from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

from . import CalendarEntity, CalendarEvent
from .const import DATA_COMPONENT, DOMAIN
from .const import DATA_COMPONENT

_LOGGER = logging.getLogger(__name__)

@@ -32,13 +33,17 @@ EVENT_START = "start"
EVENT_END = "end"
UPDATE_INTERVAL = datetime.timedelta(minutes=15)

TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend(

_OPTIONS_SCHEMA_DICT = {
    vol.Required(CONF_ENTITY_ID): cv.entity_id,
    vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
    vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
}

_CONFIG_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PLATFORM): DOMAIN,
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
        vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
    }
        vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
    },
)

# mypy: disallow-any-generics
@@ -169,14 +174,14 @@ class CalendarEventListener:
    def __init__(
        self,
        hass: HomeAssistant,
        job: HassJob[..., Coroutine[Any, Any, None] | Any],
        trigger_data: dict[str, Any],
        action_runner: TriggerActionRunner,
        trigger_payload: dict[str, Any],
        fetcher: QueuedEventFetcher,
    ) -> None:
        """Initialize CalendarEventListener."""
        self._hass = hass
        self._job = job
        self._trigger_data = trigger_data
        self._action_runner = action_runner
        self._trigger_payload = trigger_payload
        self._unsub_event: CALLBACK_TYPE | None = None
        self._unsub_refresh: CALLBACK_TYPE | None = None
        self._fetcher = fetcher
@@ -233,15 +238,11 @@ class CalendarEventListener:
        while self._events and self._events[0].trigger_time <= now:
            queued_event = self._events.pop(0)
            _LOGGER.debug("Dispatching event: %s", queued_event.event)
            self._hass.async_run_hass_job(
                self._job,
                {
                    "trigger": {
                        **self._trigger_data,
                        "calendar_event": queued_event.event.as_dict(),
                    }
                },
            )
            payload = {
                **self._trigger_payload,
                "calendar_event": queued_event.event.as_dict(),
            }
            self._action_runner(payload, "calendar event state change")

    async def _handle_refresh(self, now_utc: datetime.datetime) -> None:
        """Handle core config update."""
@@ -259,31 +260,69 @@
        self._listen_next_calendar_event()


async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: TriggerActionType,
    trigger_info: TriggerInfo,
) -> CALLBACK_TYPE:
    """Attach trigger for the specified calendar."""
    entity_id = config[CONF_ENTITY_ID]
    event_type = config[CONF_EVENT]
    offset = config[CONF_OFFSET]
class EventTrigger(Trigger):
    """Calendar event trigger."""

    # Validate the entity id is valid
    get_entity(hass, entity_id)
    _options: dict[str, Any]

    trigger_data = {
        **trigger_info["trigger_data"],
        "platform": DOMAIN,
        "event": event_type,
        "offset": offset,
    }
    listener = CalendarEventListener(
        hass,
        HassJob(action),
        trigger_data,
        queued_event_fetcher(event_fetcher(hass, entity_id), event_type, offset),
    )
    await listener.async_attach()
    return listener.async_detach
    @classmethod
    async def async_validate_complete_config(
        cls, hass: HomeAssistant, complete_config: ConfigType
    ) -> ConfigType:
        """Validate complete config."""
        complete_config = move_top_level_schema_fields_to_options(
            complete_config, _OPTIONS_SCHEMA_DICT
        )
        return await super().async_validate_complete_config(hass, complete_config)

    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, _CONFIG_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize trigger."""
        super().__init__(hass, config)

        if TYPE_CHECKING:
            assert config.options is not None
        self._options = config.options

    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach a trigger."""

        entity_id = self._options[CONF_ENTITY_ID]
        event_type = self._options[CONF_EVENT]
        offset = self._options[CONF_OFFSET]

        # Validate the entity id is valid
        get_entity(self._hass, entity_id)

        trigger_data = {
            "event": event_type,
            "offset": offset,
        }
        listener = CalendarEventListener(
            self._hass,
            run_action,
            trigger_data,
            queued_event_fetcher(
                event_fetcher(self._hass, entity_id), event_type, offset
            ),
        )
        await listener.async_attach()
        return listener.async_detach


TRIGGERS: dict[str, type[Trigger]] = {
    "_": EventTrigger,
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for calendars."""
    return TRIGGERS
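For reference, the options validated by _OPTIONS_SCHEMA_DICT above map to the long-standing calendar trigger syntax in automations; the new EventTrigger keeps accepting these fields at the top level and moves them under options internally via move_top_level_schema_fields_to_options. A minimal sketch of such an automation (the calendar entity and the notify action are hypothetical placeholders):

automation:
  - alias: "Calendar reminder"
    trigger:
      - platform: calendar
        entity_id: calendar.holidays
        event: start
        offset: "-00:30:00"
    action:
      - service: notify.notify
        data:
          message: "A calendar event starts in 30 minutes"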
@@ -98,6 +98,9 @@
      }
    },
  "triggers": {
    "hvac_mode_changed": {
      "trigger": "mdi:thermostat"
    },
    "started_cooling": {
      "trigger": "mdi:snowflake"
    },
@@ -107,6 +110,12 @@
    "started_heating": {
      "trigger": "mdi:fire"
    },
    "target_temperature_changed": {
      "trigger": "mdi:thermometer"
    },
    "target_temperature_crossed_threshold": {
      "trigger": "mdi:thermometer"
    },
    "turned_off": {
      "trigger": "mdi:power-off"
    },
@@ -192,12 +192,26 @@
        "off": "[%key:common::state::off%]"
      }
    },
    "number_or_entity": {
      "choices": {
        "entity": "Entity",
        "number": "Number"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
      }
    },
    "trigger_threshold_type": {
      "options": {
        "above": "Above a value",
        "below": "Below a value",
        "between": "In a range",
        "outside": "Outside a range"
      }
    }
  },
  "services": {
@@ -298,6 +312,20 @@
  },
  "title": "Climate",
  "triggers": {
    "hvac_mode_changed": {
      "description": "Triggers after the mode of one or more climate-control devices changes.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        },
        "hvac_mode": {
          "description": "The HVAC modes to trigger on.",
          "name": "Modes"
        }
      },
      "name": "Climate-control device mode changed"
    },
    "started_cooling": {
      "description": "Triggers after one or more climate-control devices start cooling.",
      "fields": {
@@ -328,6 +356,42 @@
      },
      "name": "Climate-control device started heating"
    },
    "target_temperature_changed": {
      "description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
      "fields": {
        "above": {
          "description": "Trigger when the target temperature is above this value.",
          "name": "Above"
        },
        "below": {
          "description": "Trigger when the target temperature is below this value.",
          "name": "Below"
        }
      },
      "name": "Climate-control device target temperature changed"
    },
    "target_temperature_crossed_threshold": {
      "description": "Triggers after the temperature setpoint of one or more climate-control devices crosses a threshold.",
      "fields": {
        "behavior": {
          "description": "[%key:component::climate::common::trigger_behavior_description%]",
          "name": "[%key:component::climate::common::trigger_behavior_name%]"
        },
        "lower_limit": {
          "description": "Lower threshold limit.",
          "name": "Lower threshold"
        },
        "threshold_type": {
          "description": "Type of threshold crossing to trigger on.",
          "name": "Threshold type"
        },
        "upper_limit": {
          "description": "Upper threshold limit.",
          "name": "Upper threshold"
        }
      },
      "name": "Climate-control device target temperature crossed threshold"
    },
    "turned_off": {
      "description": "Triggers after one or more climate-control devices turn off.",
      "fields": {
@@ -1,24 +1,65 @@
"""Provides triggers for climates."""

import voluptuous as vol

from homeassistant.const import ATTR_TEMPERATURE, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
    EntityTargetStateTriggerBase,
    Trigger,
    make_conditional_entity_state_trigger,
    make_entity_state_attribute_trigger,
    make_entity_state_trigger,
    TriggerConfig,
    make_entity_numerical_state_attribute_changed_trigger,
    make_entity_numerical_state_attribute_crossed_threshold_trigger,
    make_entity_target_state_attribute_trigger,
    make_entity_target_state_trigger,
    make_entity_transition_trigger,
)

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode

CONF_HVAC_MODE = "hvac_mode"

HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(CONF_HVAC_MODE): vol.All(
                cv.ensure_list, vol.Length(min=1), [HVACMode]
            ),
        },
    }
)


class HVACModeChangedTrigger(EntityTargetStateTriggerBase):
    """Trigger for entity state changes."""

    _domain = DOMAIN
    _schema = HVAC_MODE_CHANGED_TRIGGER_SCHEMA

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)
        self._to_states = set(self._options[CONF_HVAC_MODE])


TRIGGERS: dict[str, type[Trigger]] = {
    "started_cooling": make_entity_state_attribute_trigger(
    "hvac_mode_changed": HVACModeChangedTrigger,
    "started_cooling": make_entity_target_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
    ),
    "started_drying": make_entity_state_attribute_trigger(
    "started_drying": make_entity_target_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
    ),
    "turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
    "turned_on": make_conditional_entity_state_trigger(
    "target_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
        DOMAIN, ATTR_TEMPERATURE
    ),
    "target_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
        DOMAIN, ATTR_TEMPERATURE
    ),
    "turned_off": make_entity_target_state_trigger(DOMAIN, HVACMode.OFF),
    "turned_on": make_entity_transition_trigger(
        DOMAIN,
        from_states={
            HVACMode.OFF,
@@ -32,7 +73,7 @@ TRIGGERS: dict[str, type[Trigger]] = {
            HVACMode.HEAT_COOL,
        },
    ),
    "started_heating": make_entity_state_attribute_trigger(
    "started_heating": make_entity_target_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
    ),
}
@@ -1,9 +1,9 @@
.trigger_common: &trigger_common
  target:
  target: &trigger_climate_target
    entity:
      domain: climate
  fields:
    behavior:
    behavior: &trigger_behavior
      required: true
      default: any
      selector:
@@ -14,8 +14,67 @@
          - last
          - any

.number_or_entity: &number_or_entity
  required: false
  selector:
    choose:
      choices:
        entity:
          selector:
            entity:
              filter:
                domain:
                  - input_number
                  - number
                  - sensor
        number:
          selector:
            number:
              mode: box
      translation_key: number_or_entity

.trigger_threshold_type: &trigger_threshold_type
  required: true
  selector:
    select:
      options:
        - above
        - below
        - between
        - outside
      translation_key: trigger_threshold_type

started_cooling: *trigger_common
started_drying: *trigger_common
started_heating: *trigger_common
turned_off: *trigger_common
turned_on: *trigger_common

hvac_mode_changed:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    hvac_mode:
      context:
        filter_target: target
      required: true
      selector:
        state:
          hide_states:
            - unavailable
            - unknown
          multiple: true

target_temperature_changed:
  target: *trigger_climate_target
  fields:
    above: *number_or_entity
    below: *number_or_entity

target_temperature_crossed_threshold:
  target: *trigger_climate_target
  fields:
    behavior: *trigger_behavior
    threshold_type: *trigger_threshold_type
    lower_limit: *number_or_entity
    upper_limit: *number_or_entity
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import CONF_SWING_SUPPORT, DOMAIN
from .const import CONF_SEND_WAKEUP_PROMPT, CONF_SWING_SUPPORT, DOMAIN
from .coordinator import CoolmasterConfigEntry, CoolmasterDataUpdateCoordinator

PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR]
@@ -17,10 +17,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
    """Set up Coolmaster from a config entry."""
    host = entry.data[CONF_HOST]
    port = entry.data[CONF_PORT]
    send_wakeup_prompt = entry.data.get(CONF_SEND_WAKEUP_PROMPT, False)
    if not entry.data.get(CONF_SWING_SUPPORT):
        coolmaster = CoolMasterNet(
            host,
            port,
            send_initial_line_feed=send_wakeup_prompt,
        )
    else:
        # Swing support adds an additional request per unit. The requests are
@@ -29,6 +31,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoolmasterConfigEntry) -
        coolmaster = CoolMasterNet(
            host,
            port,
            send_initial_line_feed=send_wakeup_prompt,
            read_timeout=5,
            swing_support=True,
        )
@@ -12,7 +12,13 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import callback

from .const import CONF_SUPPORTED_MODES, CONF_SWING_SUPPORT, DEFAULT_PORT, DOMAIN
from .const import (
    CONF_SEND_WAKEUP_PROMPT,
    CONF_SUPPORTED_MODES,
    CONF_SWING_SUPPORT,
    DEFAULT_PORT,
    DOMAIN,
)

AVAILABLE_MODES = [
    HVACMode.OFF.value,
@@ -25,17 +31,15 @@ AVAILABLE_MODES = [

MODES_SCHEMA = {vol.Required(mode, default=True): bool for mode in AVAILABLE_MODES}

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        **MODES_SCHEMA,
        vol.Required(CONF_SWING_SUPPORT, default=False): bool,
    }
)
DATA_SCHEMA = {
    vol.Required(CONF_HOST): str,
    **MODES_SCHEMA,
    vol.Required(CONF_SWING_SUPPORT, default=False): bool,
}


async def _validate_connection(host: str) -> bool:
    cool = CoolMasterNet(host, DEFAULT_PORT)
async def _validate_connection(host: str, send_wakeup_prompt: bool) -> bool:
    cool = CoolMasterNet(host, DEFAULT_PORT, send_initial_line_feed=send_wakeup_prompt)
    units = await cool.status()
    return bool(units)

@@ -45,6 +49,14 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):

    VERSION = 1

    def _get_data_schema(self) -> vol.Schema:
        schema_dict = DATA_SCHEMA.copy()

        if self.show_advanced_options:
            schema_dict[vol.Required(CONF_SEND_WAKEUP_PROMPT, default=False)] = bool

        return vol.Schema(schema_dict)

    @callback
    def _async_get_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        supported_modes = [
@@ -57,6 +69,7 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_PORT: DEFAULT_PORT,
                CONF_SUPPORTED_MODES: supported_modes,
                CONF_SWING_SUPPORT: data[CONF_SWING_SUPPORT],
                CONF_SEND_WAKEUP_PROMPT: data.get(CONF_SEND_WAKEUP_PROMPT, False),
            },
        )

@@ -64,15 +77,19 @@
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        data_schema = self._get_data_schema()

        if user_input is None:
            return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA)
            return self.async_show_form(step_id="user", data_schema=data_schema)

        errors = {}

        host = user_input[CONF_HOST]

        try:
            result = await _validate_connection(host)
            result = await _validate_connection(
                host, user_input.get(CONF_SEND_WAKEUP_PROMPT, False)
            )
            if not result:
                errors["base"] = "no_units"
        except OSError:
@@ -80,7 +97,7 @@ class CoolmasterConfigFlow(ConfigFlow, domain=DOMAIN):

        if errors:
            return self.async_show_form(
                step_id="user", data_schema=DATA_SCHEMA, errors=errors
                step_id="user", data_schema=data_schema, errors=errors
            )

        return self._async_get_entry(user_input)
@@ -6,5 +6,6 @@ DEFAULT_PORT = 10102

CONF_SUPPORTED_MODES = "supported_modes"
CONF_SWING_SUPPORT = "swing_support"
CONF_SEND_WAKEUP_PROMPT = "send_wakeup_prompt"
MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["pycoolmasternet_async"],
  "requirements": ["pycoolmasternet-async==0.2.2"]
  "requirements": ["pycoolmasternet-async==0.2.4"]
}
@@ -14,10 +14,12 @@
        "heat_cool": "Support automatic heat/cool mode",
        "host": "[%key:common::config_flow::data::host%]",
        "off": "Can be turned off",
        "send_wakeup_prompt": "Send wakeup prompt",
        "swing_support": "Control swing mode"
      },
      "data_description": {
        "host": "The hostname or IP address of your CoolMasterNet device."
        "host": "The hostname or IP address of your CoolMasterNet device.",
        "send_wakeup_prompt": "Send the coolmaster unit an empty commaand before issuing any actual command. This is required for serial models."
      },
      "description": "Set up your CoolMasterNet connection details."
    }
@@ -4,15 +4,15 @@ from homeassistant.const import STATE_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
    Trigger,
    make_entity_from_state_trigger,
    make_entity_state_trigger,
    make_entity_origin_state_trigger,
    make_entity_target_state_trigger,
)

from .const import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
    "entered_home": make_entity_state_trigger(DOMAIN, STATE_HOME),
    "left_home": make_entity_from_state_trigger(DOMAIN, from_state=STATE_HOME),
    "entered_home": make_entity_target_state_trigger(DOMAIN, STATE_HOME),
    "left_home": make_entity_origin_state_trigger(DOMAIN, from_state=STATE_HOME),
}
@@ -9,7 +9,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["async_upnp_client"],
  "requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
  "requirements": ["async-upnp-client==0.46.1", "getmac==0.9.5"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -8,7 +8,7 @@
  "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": ["async-upnp-client==0.46.0"],
  "requirements": ["async-upnp-client==0.46.1"],
  "ssdp": [
    {
      "deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
@@ -4,6 +4,7 @@
  "codeowners": [],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/enocean",
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["enocean"],
  "requirements": ["enocean==0.50"],
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==43.0.0",
|
||||
"aioesphomeapi==43.3.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
|
||||
"turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["ffmpeg"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/freebox",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["freebox_api"],
|
||||
"requirements": ["freebox-api==1.2.2"],
|
||||
|
||||
@@ -6,7 +6,7 @@ from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
|
||||
from pyfritzhome import Fritzhome, FritzhomeDevice, LoginError
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate
|
||||
from pyfritzhome.devicetypes import FritzhomeTemplate, FritzhomeTrigger
|
||||
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -27,6 +27,7 @@ class FritzboxCoordinatorData:
|
||||
|
||||
devices: dict[str, FritzhomeDevice]
|
||||
templates: dict[str, FritzhomeTemplate]
|
||||
triggers: dict[str, FritzhomeTrigger]
|
||||
supported_color_properties: dict[str, tuple[dict, list]]
|
||||
|
||||
|
||||
@@ -37,6 +38,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
configuration_url: str
|
||||
fritz: Fritzhome
|
||||
has_templates: bool
|
||||
has_triggers: bool
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: FritzboxConfigEntry) -> None:
|
||||
"""Initialize the Fritzbox Smarthome device coordinator."""
|
||||
@@ -50,8 +52,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
self.new_devices: set[str] = set()
|
||||
self.new_templates: set[str] = set()
|
||||
self.new_triggers: set[str] = set()
|
||||
|
||||
self.data = FritzboxCoordinatorData({}, {}, {})
|
||||
self.data = FritzboxCoordinatorData({}, {}, {}, {})
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
@@ -74,6 +77,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
)
|
||||
LOGGER.debug("enable smarthome templates: %s", self.has_templates)
|
||||
|
||||
self.has_triggers = await self.hass.async_add_executor_job(
|
||||
self.fritz.has_triggers
|
||||
)
|
||||
LOGGER.debug("enable smarthome triggers: %s", self.has_triggers)
|
||||
|
||||
self.configuration_url = self.fritz.get_prefixed_host()
|
||||
|
||||
await self.async_config_entry_first_refresh()
|
||||
@@ -92,7 +100,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
|
||||
available_main_ains = [
|
||||
ain
|
||||
for ain, dev in data.devices.items() | data.templates.items()
|
||||
for ain, dev in (data.devices | data.templates | data.triggers).items()
|
||||
if dev.device_and_unit_id[1] is None
|
||||
]
|
||||
device_reg = dr.async_get(self.hass)
|
||||
@@ -112,6 +120,9 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
except RequestConnectionError as ex:
|
||||
raise UpdateFailed from ex
|
||||
except HTTPError:
|
||||
@@ -123,6 +134,8 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
self.fritz.update_devices(ignore_removed=False)
|
||||
if self.has_templates:
|
||||
self.fritz.update_templates(ignore_removed=False)
|
||||
if self.has_triggers:
|
||||
self.fritz.update_triggers(ignore_removed=False)
|
||||
|
||||
devices = self.fritz.get_devices()
|
||||
device_data = {}
|
||||
@@ -156,12 +169,20 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
for template in templates:
|
||||
template_data[template.ain] = template
|
||||
|
||||
trigger_data = {}
|
||||
if self.has_triggers:
|
||||
triggers = self.fritz.get_triggers()
|
||||
for trigger in triggers:
|
||||
trigger_data[trigger.ain] = trigger
|
||||
|
||||
self.new_devices = device_data.keys() - self.data.devices.keys()
|
||||
self.new_templates = template_data.keys() - self.data.templates.keys()
|
||||
self.new_triggers = trigger_data.keys() - self.data.triggers.keys()
|
||||
|
||||
return FritzboxCoordinatorData(
|
||||
devices=device_data,
|
||||
templates=template_data,
|
||||
triggers=trigger_data,
|
||||
supported_color_properties=supported_color_properties,
|
||||
)
|
||||
|
||||
@@ -193,6 +214,7 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
|
||||
if (
|
||||
self.data.devices.keys() - new_data.devices.keys()
|
||||
or self.data.templates.keys() - new_data.templates.keys()
|
||||
or self.data.triggers.keys() - new_data.triggers.keys()
|
||||
):
|
||||
self.cleanup_removed_devices(new_data)
|
||||
|
||||
|
||||
@@ -4,14 +4,17 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyfritzhome.devicetypes import FritzhomeTrigger
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import FritzboxConfigEntry
|
||||
from .entity import FritzBoxDeviceEntity
|
||||
from .entity import FritzBoxDeviceEntity, FritzBoxEntity
|
||||
|
||||
# Coordinator handles data updates, so we can allow unlimited parallel updates
|
||||
PARALLEL_UPDATES = 0
|
||||
@@ -26,21 +29,27 @@ async def async_setup_entry(
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
@callback
|
||||
def _add_entities(devices: set[str] | None = None) -> None:
|
||||
"""Add devices."""
|
||||
def _add_entities(
|
||||
devices: set[str] | None = None, triggers: set[str] | None = None
|
||||
) -> None:
|
||||
"""Add devices and triggers."""
|
||||
if devices is None:
|
||||
devices = coordinator.new_devices
|
||||
if not devices:
|
||||
if triggers is None:
|
||||
triggers = coordinator.new_triggers
|
||||
if not devices and not triggers:
|
||||
return
|
||||
async_add_entities(
|
||||
entities = [
|
||||
FritzboxSwitch(coordinator, ain)
|
||||
for ain in devices
|
||||
if coordinator.data.devices[ain].has_switch
|
||||
)
|
||||
] + [FritzboxTrigger(coordinator, ain) for ain in triggers]
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
entry.async_on_unload(coordinator.async_add_listener(_add_entities))
|
||||
|
||||
_add_entities(set(coordinator.data.devices))
|
||||
_add_entities(set(coordinator.data.devices), set(coordinator.data.triggers))
|
||||
|
||||
|
||||
class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
@@ -70,3 +79,42 @@ class FritzboxSwitch(FritzBoxDeviceEntity, SwitchEntity):
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="manual_switching_disabled",
|
||||
)
|
||||
|
||||
|
||||
class FritzboxTrigger(FritzBoxEntity, SwitchEntity):
|
||||
"""The switch class for FRITZ!SmartHome triggers."""
|
||||
|
||||
@property
|
||||
def data(self) -> FritzhomeTrigger:
|
||||
"""Return the trigger data entity."""
|
||||
return self.coordinator.data.triggers[self.ain]
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device specific attributes."""
|
||||
return DeviceInfo(
|
||||
name=self.data.name,
|
||||
identifiers={(DOMAIN, self.ain)},
|
||||
configuration_url=self.coordinator.configuration_url,
|
||||
manufacturer="FRITZ!",
|
||||
model="SmartHome Routine",
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the trigger is active."""
|
||||
return self.data.active # type: ignore [no-any-return]
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Activate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_active, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Deactivate the trigger."""
|
||||
await self.hass.async_add_executor_job(
|
||||
self.coordinator.fritz.set_trigger_inactive, self.ain
|
||||
)
|
||||
await self.coordinator.async_refresh()
|
||||
|
||||
@@ -23,5 +23,5 @@
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251203.2"]
|
||||
"requirements": ["home-assistant-frontend==20251203.3"]
|
||||
}
|
||||
|
||||
@@ -2,15 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.stream import (
|
||||
CONF_RTSP_TRANSPORT,
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.const import CONF_AUTHENTICATION, CONF_VERIFY_SSL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from .const import CONF_FRAMERATE, CONF_LIMIT_REFETCH_TO_URL_CHANGE, SECTION_ADVANCED
|
||||
|
||||
DOMAIN = "generic"
|
||||
PLATFORMS = [Platform.CAMERA]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
@@ -47,3 +55,38 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Migrate entry."""
|
||||
_LOGGER.debug("Migrating from version %s:%s", entry.version, entry.minor_version)
|
||||
|
||||
if entry.version > 2:
|
||||
# This means the user has downgraded from a future version
|
||||
return False
|
||||
|
||||
if entry.version == 1:
|
||||
# Migrate to advanced section
|
||||
new_options = {**entry.options}
|
||||
advanced = new_options[SECTION_ADVANCED] = {
|
||||
CONF_FRAMERATE: new_options.pop(CONF_FRAMERATE),
|
||||
CONF_VERIFY_SSL: new_options.pop(CONF_VERIFY_SSL),
|
||||
}
|
||||
|
||||
# migrate optional fields
|
||||
for key in (
|
||||
CONF_RTSP_TRANSPORT,
|
||||
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
|
||||
CONF_AUTHENTICATION,
|
||||
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
|
||||
):
|
||||
if key in new_options:
|
||||
advanced[key] = new_options.pop(key)
|
||||
|
||||
hass.config_entries.async_update_entry(entry, options=new_options, version=2)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migration to version %s:%s successful", entry.version, entry.minor_version
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
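To make the effect of the async_migrate_entry function above concrete, here is a hypothetical before/after of a stored options mapping (the values are illustrative only, not taken from this changeset):

# Version 1 options (flat):
options_v1 = {
    "still_image_url": "http://camera.local/snapshot.jpg",
    "framerate": 2,
    "verify_ssl": True,
    "rtsp_transport": "tcp",
}
# Version 2 options after migration: advanced keys nested under SECTION_ADVANCED ("advanced")
options_v2 = {
    "still_image_url": "http://camera.local/snapshot.jpg",
    "advanced": {"framerate": 2, "verify_ssl": True, "rtsp_transport": "tcp"},
}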
@@ -41,6 +41,7 @@ from .const import (
CONF_STILL_IMAGE_URL,
CONF_STREAM_SOURCE,
GET_IMAGE_TIMEOUT,
SECTION_ADVANCED,
)

_LOGGER = logging.getLogger(__name__)
@@ -62,9 +63,11 @@ def generate_auth(device_info: Mapping[str, Any]) -> httpx.Auth | None:
"""Generate httpx.Auth object from credentials."""
username: str | None = device_info.get(CONF_USERNAME)
password: str | None = device_info.get(CONF_PASSWORD)
authentication = device_info.get(CONF_AUTHENTICATION)
if username and password:
if authentication == HTTP_DIGEST_AUTHENTICATION:
if (
device_info[SECTION_ADVANCED].get(CONF_AUTHENTICATION)
== HTTP_DIGEST_AUTHENTICATION
):
return httpx.DigestAuth(username=username, password=password)
return httpx.BasicAuth(username=username, password=password)
return None
@@ -99,14 +102,16 @@ class GenericCamera(Camera):
if self._stream_source:
self._stream_source = Template(self._stream_source, hass)
self._attr_supported_features = CameraEntityFeature.STREAM
self._limit_refetch = device_info.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False)
self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE]
self._limit_refetch = device_info[SECTION_ADVANCED].get(
CONF_LIMIT_REFETCH_TO_URL_CHANGE, False
)
self._attr_frame_interval = 1 / device_info[SECTION_ADVANCED][CONF_FRAMERATE]
self.content_type = device_info[CONF_CONTENT_TYPE]
self.verify_ssl = device_info[CONF_VERIFY_SSL]
if device_info.get(CONF_RTSP_TRANSPORT):
self.stream_options[CONF_RTSP_TRANSPORT] = device_info[CONF_RTSP_TRANSPORT]
self.verify_ssl = device_info[SECTION_ADVANCED][CONF_VERIFY_SSL]
if rtsp_transport := device_info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
self.stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
self._auth = generate_auth(device_info)
if device_info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
if device_info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
self.stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True

self._last_url = None

@@ -50,10 +50,18 @@ from homeassistant.const import (
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import config_validation as cv, template as template_helper
from homeassistant.helpers.entity_platform import PlatformData
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.network import get_url
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from homeassistant.util import slugify

from .camera import GenericCamera, generate_auth
@@ -67,17 +75,20 @@ from .const import (
DEFAULT_NAME,
DOMAIN,
GET_IMAGE_TIMEOUT,
SECTION_ADVANCED,
)

_LOGGER = logging.getLogger(__name__)

DEFAULT_DATA = {
CONF_NAME: DEFAULT_NAME,
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
CONF_FRAMERATE: 2,
CONF_VERIFY_SSL: True,
CONF_RTSP_TRANSPORT: "tcp",
SECTION_ADVANCED: {
CONF_AUTHENTICATION: HTTP_BASIC_AUTHENTICATION,
CONF_LIMIT_REFETCH_TO_URL_CHANGE: False,
CONF_FRAMERATE: 2,
CONF_VERIFY_SSL: True,
CONF_RTSP_TRANSPORT: "tcp",
},
}

SUPPORTED_IMAGE_TYPES = {"png", "jpeg", "gif", "svg+xml", "webp"}
@@ -94,58 +105,47 @@ class InvalidStreamException(HomeAssistantError):

def build_schema(
user_input: Mapping[str, Any],
is_options_flow: bool = False,
show_advanced_options: bool = False,
) -> vol.Schema:
"""Create schema for camera config setup."""
rtsp_options = [
SelectOptionDict(
value=value,
label=name,
)
for value, name in RTSP_TRANSPORTS.items()
]

advanced_section = {
vol.Required(CONF_FRAMERATE): vol.All(
vol.Range(min=0, min_included=False), cv.positive_float
),
vol.Required(CONF_VERIFY_SSL): bool,
vol.Optional(CONF_RTSP_TRANSPORT): SelectSelector(
SelectSelectorConfig(
options=rtsp_options,
mode=SelectSelectorMode.DROPDOWN,
)
),
vol.Optional(CONF_AUTHENTICATION): vol.In(
[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
),
}
spec = {
vol.Optional(
CONF_STILL_IMAGE_URL,
description={"suggested_value": user_input.get(CONF_STILL_IMAGE_URL, "")},
): str,
vol.Optional(
CONF_STREAM_SOURCE,
description={"suggested_value": user_input.get(CONF_STREAM_SOURCE, "")},
): str,
vol.Optional(
CONF_RTSP_TRANSPORT,
description={"suggested_value": user_input.get(CONF_RTSP_TRANSPORT)},
): vol.In(RTSP_TRANSPORTS),
vol.Optional(
CONF_AUTHENTICATION,
description={"suggested_value": user_input.get(CONF_AUTHENTICATION)},
): vol.In([HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]),
vol.Optional(
CONF_USERNAME,
description={"suggested_value": user_input.get(CONF_USERNAME, "")},
): str,
vol.Optional(
CONF_PASSWORD,
description={"suggested_value": user_input.get(CONF_PASSWORD, "")},
): str,
vol.Required(
CONF_FRAMERATE,
description={"suggested_value": user_input.get(CONF_FRAMERATE, 2)},
): vol.All(vol.Range(min=0, min_included=False), cv.positive_float),
vol.Required(
CONF_VERIFY_SSL, default=user_input.get(CONF_VERIFY_SSL, True)
): bool,
vol.Optional(CONF_STREAM_SOURCE): str,
vol.Optional(CONF_STILL_IMAGE_URL): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
vol.Required(SECTION_ADVANCED): section(
vol.Schema(advanced_section), {"collapsed": True}
),
}
if is_options_flow:
spec[
vol.Required(
CONF_LIMIT_REFETCH_TO_URL_CHANGE,
default=user_input.get(CONF_LIMIT_REFETCH_TO_URL_CHANGE, False),
)
] = bool
advanced_section[vol.Optional(CONF_LIMIT_REFETCH_TO_URL_CHANGE)] = bool
if show_advanced_options:
spec[
vol.Required(
CONF_USE_WALLCLOCK_AS_TIMESTAMPS,
default=user_input.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False),
)
] = bool
advanced_section[vol.Optional(CONF_USE_WALLCLOCK_AS_TIMESTAMPS)] = bool

return vol.Schema(spec)
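Because build_schema now wraps the advanced fields in a data_entry_flow section, a submitted form arrives in the flow as a nested mapping rather than a flat one. A hypothetical submission (field values are made up):

user_input = {
    "still_image_url": "http://camera.local/snapshot.jpg",
    "username": "admin",
    "advanced": {"framerate": 2.0, "verify_ssl": True, "rtsp_transport": "tcp"},
}
# The advanced keys sit under SECTION_ADVANCED, which is why the camera code reads
# device_info[SECTION_ADVANCED][CONF_FRAMERATE] instead of a flat device_info[CONF_FRAMERATE].
framerate = user_input["advanced"]["framerate"]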
@@ -187,7 +187,7 @@ async def async_test_still(
return {CONF_STILL_IMAGE_URL: "malformed_url"}, None
if not yarl_url.is_absolute():
return {CONF_STILL_IMAGE_URL: "relative_url"}, None
verify_ssl = info[CONF_VERIFY_SSL]
verify_ssl = info[SECTION_ADVANCED][CONF_VERIFY_SSL]
auth = generate_auth(info)
try:
async_client = get_async_client(hass, verify_ssl=verify_ssl)
@@ -268,9 +268,9 @@ async def async_test_and_preview_stream(
_LOGGER.warning("Problem rendering template %s: %s", stream_source, err)
raise InvalidStreamException("template_error") from err
stream_options: dict[str, str | bool | float] = {}
if rtsp_transport := info.get(CONF_RTSP_TRANSPORT):
if rtsp_transport := info[SECTION_ADVANCED].get(CONF_RTSP_TRANSPORT):
stream_options[CONF_RTSP_TRANSPORT] = rtsp_transport
if info.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
if info[SECTION_ADVANCED].get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
stream_options[CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = True

try:
@@ -326,7 +326,7 @@ def register_still_preview(hass: HomeAssistant) -> None:
class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
"""Config flow for generic IP camera."""

VERSION = 1
VERSION = 2

def __init__(self) -> None:
"""Initialize Generic ConfigFlow."""
@@ -381,7 +381,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN):
user_input = DEFAULT_DATA.copy()
return self.async_show_form(
step_id="user",
data_schema=build_schema(user_input),
data_schema=self.add_suggested_values_to_schema(build_schema(), user_input),
errors=errors,
)

@@ -449,13 +449,19 @@ class GenericOptionsFlowHandler(OptionsFlow):
self.preview_stream = None
if not errors:
data = {
CONF_USE_WALLCLOCK_AS_TIMESTAMPS: self.config_entry.options.get(
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
),
**user_input,
CONF_CONTENT_TYPE: still_format
or self.config_entry.options.get(CONF_CONTENT_TYPE),
}
if (
CONF_USE_WALLCLOCK_AS_TIMESTAMPS
not in user_input[SECTION_ADVANCED]
):
data[SECTION_ADVANCED][CONF_USE_WALLCLOCK_AS_TIMESTAMPS] = (
self.config_entry.options[SECTION_ADVANCED].get(
CONF_USE_WALLCLOCK_AS_TIMESTAMPS, False
)
)
self.user_input = data
# temporary preview for user to check the image
self.preview_image_settings = data
@@ -464,10 +470,12 @@ class GenericOptionsFlowHandler(OptionsFlow):
user_input = self.user_input
return self.async_show_form(
step_id="init",
data_schema=build_schema(
data_schema=self.add_suggested_values_to_schema(
build_schema(
True,
self.show_advanced_options,
),
user_input or self.config_entry.options,
True,
self.show_advanced_options,
),
errors=errors,
)
@@ -583,7 +591,8 @@ async def ws_start_preview(
_LOGGER.debug("Got preview still URL: %s", ha_still_url)

if ha_stream := flow.preview_stream:
ha_stream_url = ha_stream.endpoint_url(HLS_PROVIDER)
# HLS player needs an absolute URL as base for constructing child playlist URLs
ha_stream_url = f"{get_url(hass)}{ha_stream.endpoint_url(HLS_PROVIDER)}"
_LOGGER.debug("Got preview stream URL: %s", ha_stream_url)

connection.send_message(

@@ -9,3 +9,4 @@ CONF_STILL_IMAGE_URL = "still_image_url"
CONF_STREAM_SOURCE = "stream_source"
CONF_FRAMERATE = "framerate"
GET_IMAGE_TIMEOUT = 10
SECTION_ADVANCED = "advanced"

@@ -26,17 +26,24 @@
"step": {
"user": {
"data": {
"authentication": "Authentication",
"framerate": "Frame rate (Hz)",
"limit_refetch_to_url_change": "Limit refetch to URL change",
"password": "[%key:common::config_flow::data::password%]",
"rtsp_transport": "RTSP transport protocol",
"still_image_url": "Still image URL (e.g. http://...)",
"stream_source": "Stream source URL (e.g. rtsp://...)",
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
"username": "[%key:common::config_flow::data::username%]"
},
"description": "Enter the settings to connect to the camera."
"sections": {
"advanced": {
"data": {
"authentication": "Authentication",
"framerate": "Frame rate (Hz)",
"limit_refetch_to_url_change": "Limit refetch to URL change",
"rtsp_transport": "RTSP transport protocol",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"description": "Advanced settings are only needed for special cases. Leave them unchanged unless you know what you are doing.",
"name": "Advanced settings"
}
}
},
"user_confirm": {
"data": {
@@ -70,19 +77,27 @@
"step": {
"init": {
"data": {
"authentication": "[%key:component::generic::config::step::user::data::authentication%]",
"framerate": "[%key:component::generic::config::step::user::data::framerate%]",
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::data::limit_refetch_to_url_change%]",
"password": "[%key:common::config_flow::data::password%]",
"rtsp_transport": "[%key:component::generic::config::step::user::data::rtsp_transport%]",
"still_image_url": "[%key:component::generic::config::step::user::data::still_image_url%]",
"stream_source": "[%key:component::generic::config::step::user::data::stream_source%]",
"use_wallclock_as_timestamps": "Use wallclock as timestamps",
"username": "[%key:common::config_flow::data::username%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
"sections": {
"advanced": {
"data": {
"authentication": "[%key:component::generic::config::step::user::sections::advanced::data::authentication%]",
"framerate": "[%key:component::generic::config::step::user::sections::advanced::data::framerate%]",
"limit_refetch_to_url_change": "[%key:component::generic::config::step::user::sections::advanced::data::limit_refetch_to_url_change%]",
"rtsp_transport": "[%key:component::generic::config::step::user::sections::advanced::data::rtsp_transport%]",
"use_wallclock_as_timestamps": "Use wallclock as timestamps",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"use_wallclock_as_timestamps": "This option may correct segmenting or crashing issues arising from buggy timestamp implementations on some cameras"
},
"description": "[%key:component::generic::config::step::user::sections::advanced::description%]",
"name": "[%key:component::generic::config::step::user::sections::advanced::name%]"
}
}
},
"user_confirm": {

@@ -8,4 +8,4 @@ HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
# in script/hassfest/docker.py.
RECOMMENDED_VERSION = "1.9.12"
RECOMMENDED_VERSION = "1.9.13"

@@ -12,6 +12,7 @@
"homekit": {
"models": ["iSmartGate"]
},
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["ismartgate"],
"requirements": ["ismartgate==5.0.2"]

@@ -101,6 +101,15 @@ def _is_location_already_configured(
return False

def _is_location_name_already_configured(hass: HomeAssistant, new_data: str) -> bool:
"""Check if the location name is already configured."""
for entry in hass.config_entries.async_entries(DOMAIN):
for subentry in entry.subentries.values():
if subentry.title.lower() == new_data.lower():
return True
return False

class GoogleAirQualityConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Google AirQuality."""

@@ -178,8 +187,19 @@ class LocationSubentryFlowHandler(ConfigSubentryFlow):
description_placeholders: dict[str, str] = {}
if user_input is not None:
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
return self.async_abort(reason="already_configured")
errors["base"] = "location_already_configured"
if _is_location_name_already_configured(self.hass, user_input[CONF_NAME]):
errors["base"] = "location_name_already_configured"
api: GoogleAirQualityApi = self._get_entry().runtime_data.api
if errors:
return self.async_show_form(
step_id="location",
data_schema=self.add_suggested_values_to_schema(
_get_location_schema(self.hass), user_input
),
errors=errors,
description_placeholders=description_placeholders,
)
if await _validate_input(user_input, api, errors, description_placeholders):
return self.async_create_entry(
title=user_input[CONF_NAME],

@@ -47,12 +47,12 @@
"config_subentries": {
"location": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
"unable_to_fetch": "[%key:component::google_air_quality::common::unable_to_fetch%]"
},
"entry_type": "Air quality location",
"error": {
"no_data_for_location": "Information is unavailable for this location. Please try a different location.",
"location_already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
"location_name_already_configured": "Location name already configured.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {

@@ -7,6 +7,7 @@ ATTR_CC = "cc"
ATTR_ENABLED = "enabled"
ATTR_END = "end"
ATTR_FROM = "from"
ATTR_ALIAS_FROM = "alias_from"
ATTR_ME = "me"
ATTR_MESSAGE = "message"
ATTR_PLAIN_TEXT = "plain_text"

@@ -4,6 +4,7 @@ from __future__ import annotations

import base64
from email.mime.text import MIMEText
from email.utils import formataddr
from typing import Any

from googleapiclient.http import HttpRequest
@@ -17,10 +18,20 @@ from homeassistant.components.notify import (
BaseNotificationService,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .api import AsyncConfigEntryAuth
from .const import ATTR_BCC, ATTR_CC, ATTR_FROM, ATTR_ME, ATTR_SEND, DATA_AUTH
from .const import (
ATTR_ALIAS_FROM,
ATTR_BCC,
ATTR_CC,
ATTR_FROM,
ATTR_ME,
ATTR_SEND,
DATA_AUTH,
DOMAIN,
)

async def async_get_service(
@@ -47,7 +58,17 @@ class GMailNotificationService(BaseNotificationService):
email = MIMEText(message, "html")
if to_addrs := kwargs.get(ATTR_TARGET):
email["To"] = ", ".join(to_addrs)
email["From"] = data.get(ATTR_FROM, ATTR_ME)

email_from = data.get(ATTR_FROM, ATTR_ME)
if alias := data.get(ATTR_ALIAS_FROM):
if email_from == ATTR_ME:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="missing_from_for_alias",
)
email["From"] = formataddr((alias, email_from))
else:
email["From"] = email_from
email["Subject"] = title
email[ATTR_CC] = ", ".join(data.get(ATTR_CC, []))
email[ATTR_BCC] = ", ".join(data.get(ATTR_BCC, []))
@@ -57,9 +78,9 @@
msg: HttpRequest
users = (await self.auth.get_resource()).users()
if data.get(ATTR_SEND) is False:
msg = users.drafts().create(userId=email["From"], body={ATTR_MESSAGE: body})
msg = users.drafts().create(userId=email_from, body={ATTR_MESSAGE: body})
else:
if not to_addrs:
raise ValueError("recipient address required")
msg = users.messages().send(userId=email["From"], body=body)
msg = users.messages().send(userId=email_from, body=body)
await self.hass.async_add_executor_job(msg.execute)
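For reference, the email.utils.formataddr call used above combines the alias and the address into a single RFC 5322 From header value; the name and address here are made up:

from email.utils import formataddr

# Produces: 'Jane Doe <jane@example.com>'
print(formataddr(("Jane Doe", "jane@example.com")))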
@@ -47,6 +47,11 @@
}
}
},
"exceptions": {
"missing_from_for_alias": {
"message": "Missing 'from' email when setting an alias to show. You have to provide a 'from' email"
}
},
"services": {
"set_vacation": {
"description": "Sets vacation responder settings for Google Mail.",

@@ -16,6 +16,7 @@ from homeassistant.const import (
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig

from .const import (
ABORT_NO_PLANTS,
@@ -23,12 +24,13 @@ from .const import (
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
CONF_REGION,
DEFAULT_URL,
DOMAIN,
ERROR_CANNOT_CONNECT,
ERROR_INVALID_AUTH,
LOGIN_INVALID_AUTH_CODE,
SERVER_URLS,
SERVER_URLS_NAMES,
)

_LOGGER = logging.getLogger(__name__)
@@ -67,10 +69,13 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
self.auth_type = AUTH_PASSWORD

# Traditional username/password authentication
# Convert region name to URL - guaranteed to exist since vol.In validates it
server_url = SERVER_URLS_NAMES[user_input[CONF_REGION]]

self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
)
self.api.server_url = user_input[CONF_URL]
self.api.server_url = server_url

try:
login_response = await self.hass.async_add_executor_job(
@@ -91,6 +96,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):

self.user_id = login_response["user"]["id"]
self.data = user_input
# Store the actual URL, not the region name
self.data[CONF_URL] = server_url
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()

@@ -104,8 +111,11 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
self.auth_type = AUTH_API_TOKEN

# Using token authentication
token = user_input[CONF_TOKEN]
self.api = growattServer.OpenApiV1(token=token)
# Convert region name to URL - guaranteed to exist since vol.In validates it
server_url = SERVER_URLS_NAMES[user_input[CONF_REGION]]

self.api = growattServer.OpenApiV1(token=user_input[CONF_TOKEN])
self.api.server_url = server_url

# Verify token by fetching plant list
try:
@@ -127,6 +137,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
self.data = user_input
# Store the actual URL, not the region name
self.data[CONF_URL] = server_url
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()

@@ -139,7 +151,12 @@
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(CONF_URL, default=DEFAULT_URL): vol.In(SERVER_URLS),
vol.Required(CONF_REGION, default=DEFAULT_URL): SelectSelector(
SelectSelectorConfig(
options=list(SERVER_URLS_NAMES.keys()),
translation_key="region",
)
),
}
)

@@ -155,6 +172,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema = vol.Schema(
{
vol.Required(CONF_TOKEN): str,
vol.Required(CONF_REGION, default=DEFAULT_URL): SelectSelector(
SelectSelectorConfig(
options=list(SERVER_URLS_NAMES.keys()),
translation_key="region",
)
),
}
)

@@ -3,6 +3,7 @@
from homeassistant.const import Platform

CONF_PLANT_ID = "plant_id"
CONF_REGION = "region"

# API key support
@@ -18,13 +19,14 @@ DEFAULT_PLANT_ID = "0"

DEFAULT_NAME = "Growatt"

SERVER_URLS = [
"https://openapi.growatt.com/", # Other regional server
"https://openapi-cn.growatt.com/", # Chinese server
"https://openapi-us.growatt.com/", # North American server
"https://openapi-au.growatt.com/", # Australia Server
"http://server.smten.com/", # smten server
]
SERVER_URLS_NAMES = {
"north_america": "https://openapi-us.growatt.com/",
"australia_new_zealand": "https://openapi-au.growatt.com/",
"china": "https://openapi-cn.growatt.com/",
"other_regions": "https://openapi.growatt.com/",
"smten_server": "http://server.smten.com/",
"era_server": "http://ess-server.atesspower.com/",
}

DEPRECATED_URLS = [
"https://server.growatt.com/",
@@ -32,7 +34,7 @@ DEPRECATED_URLS = [
"https://server-us.growatt.com/",
]

DEFAULT_URL = SERVER_URLS[0]
DEFAULT_URL = "other_regions"

DOMAIN = "growatt_server"
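The net effect of the new mapping is that the form collects a region key and the flow resolves it to an API base URL before storing it. A small illustration using the constants above (the selected key is just an example):

selected_region = "north_america"  # example form value
server_url = SERVER_URLS_NAMES[selected_region]
assert server_url == "https://openapi-us.growatt.com/"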
@@ -24,9 +24,7 @@ rules:
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: todo
|
||||
comment: Update server URL dropdown to show regional descriptions (e.g., 'China', 'United States') instead of raw URLs.
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"password_auth": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"url": "[%key:common::config_flow::data::url%]",
|
||||
"url": "Server region",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"title": "Enter your Growatt login credentials"
|
||||
@@ -26,7 +26,8 @@
|
||||
},
|
||||
"token_auth": {
|
||||
"data": {
|
||||
"token": "API Token"
|
||||
"token": "API Token",
|
||||
"url": "Server region"
|
||||
},
|
||||
"description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
|
||||
"title": "Enter your API token"
|
||||
@@ -530,6 +531,16 @@
|
||||
"grid_first": "Grid first",
|
||||
"load_first": "Load first"
|
||||
}
|
||||
},
|
||||
"region": {
|
||||
"options": {
|
||||
"australia_new_zealand": "Australia and New Zealand",
|
||||
"china": "China",
|
||||
"era_server": "Era server (Atess Power)",
|
||||
"north_america": "North America",
|
||||
"other_regions": "Other regions",
|
||||
"smten_server": "SMTEN server"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
|
||||
@@ -51,12 +51,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
|
||||
|
||||
try:
|
||||
camera = await hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
except requests.exceptions.RequestException as err:
|
||||
raise ConfigEntryNotReady(f"Unable to connect to {host}") from err
|
||||
|
||||
device_id = camera.get_id()
|
||||
device_id = camera.get_id
|
||||
if device_id is None:
|
||||
raise ConfigEntryNotReady(f"Unable to get device ID from {host}")
|
||||
|
||||
|
||||
@@ -49,14 +49,14 @@ class HikvisionConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
camera = await self.hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
device_id = camera.get_id()
|
||||
device_name = camera.get_name
|
||||
except requests.exceptions.RequestException:
|
||||
_LOGGER.exception("Error connecting to Hikvision device")
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
device_id = camera.get_id
|
||||
device_name = camera.get_name
|
||||
if device_id is None:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
@@ -102,16 +102,16 @@ class HikvisionConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
try:
|
||||
camera = await self.hass.async_add_executor_job(
|
||||
HikCamera, url, port, username, password
|
||||
HikCamera, url, port, username, password, ssl
|
||||
)
|
||||
device_id = camera.get_id()
|
||||
device_name = camera.get_name
|
||||
except requests.exceptions.RequestException:
|
||||
_LOGGER.exception(
|
||||
"Error connecting to Hikvision device during import, aborting"
|
||||
)
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
device_id = camera.get_id
|
||||
device_name = camera.get_name
|
||||
if device_id is None:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"domain": "hikvision",
|
||||
"name": "Hikvision",
|
||||
"codeowners": ["@mezz64"],
|
||||
"codeowners": ["@mezz64", "@ptarjan"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/hikvision",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyhik"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pyHik==0.3.2"]
|
||||
"requirements": ["pyHik==0.3.4"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@bannhead"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/hisense_aehw4a1",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyaehw4a1"],
|
||||
"requirements": ["pyaehw4a1==0.3.9"]
|
||||
|
||||
@@ -65,6 +65,11 @@ BINARY_SENSORS = (
|
||||
},
|
||||
translation_key="charging_connection",
|
||||
),
|
||||
HomeConnectBinarySensorEntityDescription(
|
||||
key=StatusKey.BSH_COMMON_INTERIOR_ILLUMINATION_ACTIVE,
|
||||
translation_key="interior_illumination_active",
|
||||
device_class=BinarySensorDeviceClass.LIGHT,
|
||||
),
|
||||
HomeConnectBinarySensorEntityDescription(
|
||||
key=StatusKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_DUST_BOX_INSERTED,
|
||||
translation_key="dust_box_inserted",
|
||||
|
||||
@@ -270,6 +270,10 @@ WARMING_LEVEL_OPTIONS = {
|
||||
)
|
||||
}
|
||||
|
||||
RINSE_PLUS_OPTIONS = {
|
||||
bsh_key_to_translation_key(option): option
|
||||
for option in ("LaundryCare.Washer.EnumType.RinsePlus.Off",)
|
||||
}
|
||||
TEMPERATURE_OPTIONS = {
|
||||
bsh_key_to_translation_key(option): option
|
||||
for option in (
|
||||
@@ -309,6 +313,12 @@ SPIN_SPEED_OPTIONS = {
|
||||
)
|
||||
}
|
||||
|
||||
STAINS_OPTIONS = {
|
||||
bsh_key_to_translation_key(option): option
|
||||
for option in ("LaundryCare.Washer.EnumType.Stains.Off",)
|
||||
}
|
||||
|
||||
|
||||
VARIO_PERFECT_OPTIONS = {
|
||||
bsh_key_to_translation_key(option): option
|
||||
for option in (
|
||||
@@ -363,8 +373,10 @@ PROGRAM_ENUM_OPTIONS = {
|
||||
(OptionKey.COOKING_COMMON_HOOD_VENTING_LEVEL, VENTING_LEVEL_OPTIONS),
|
||||
(OptionKey.COOKING_COMMON_HOOD_INTENSIVE_LEVEL, INTENSIVE_LEVEL_OPTIONS),
|
||||
(OptionKey.COOKING_OVEN_WARMING_LEVEL, WARMING_LEVEL_OPTIONS),
|
||||
(OptionKey.LAUNDRY_CARE_WASHER_RINSE_PLUS, RINSE_PLUS_OPTIONS),
|
||||
(OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE, TEMPERATURE_OPTIONS),
|
||||
(OptionKey.LAUNDRY_CARE_WASHER_SPIN_SPEED, SPIN_SPEED_OPTIONS),
|
||||
(OptionKey.LAUNDRY_CARE_WASHER_STAINS, STAINS_OPTIONS),
|
||||
(OptionKey.LAUNDRY_CARE_COMMON_VARIO_PERFECT, VARIO_PERFECT_OPTIONS),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -4,6 +4,12 @@
|
||||
"dust_box_inserted": {
|
||||
"default": "mdi:download"
|
||||
},
|
||||
"interior_illumination_active": {
|
||||
"default": "mdi:lightbulb-on",
|
||||
"state": {
|
||||
"off": "mdi:lightbulb-off"
|
||||
}
|
||||
},
|
||||
"lifted": {
|
||||
"default": "mdi:arrow-up-right-bold"
|
||||
},
|
||||
|
||||
@@ -29,7 +29,9 @@ from .const import (
|
||||
HOT_WATER_TEMPERATURE_OPTIONS,
|
||||
INTENSIVE_LEVEL_OPTIONS,
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP,
|
||||
RINSE_PLUS_OPTIONS,
|
||||
SPIN_SPEED_OPTIONS,
|
||||
STAINS_OPTIONS,
|
||||
SUCTION_POWER_OPTIONS,
|
||||
TEMPERATURE_OPTIONS,
|
||||
TRANSLATION_KEYS_PROGRAMS_MAP,
|
||||
@@ -279,6 +281,16 @@ PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS = (
|
||||
for translation_key, value in WARMING_LEVEL_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_RINSE_PLUS,
|
||||
translation_key="rinse_plus",
|
||||
options=list(RINSE_PLUS_OPTIONS),
|
||||
translation_key_values=RINSE_PLUS_OPTIONS,
|
||||
values_translation_key={
|
||||
value: translation_key
|
||||
for translation_key, value in RINSE_PLUS_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE,
|
||||
translation_key="washer_temperature",
|
||||
@@ -299,6 +311,15 @@ PROGRAM_SELECT_OPTION_ENTITY_DESCRIPTIONS = (
|
||||
for translation_key, value in SPIN_SPEED_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_STAINS,
|
||||
translation_key="auto_stain",
|
||||
options=list(STAINS_OPTIONS),
|
||||
translation_key_values=STAINS_OPTIONS,
|
||||
values_translation_key={
|
||||
value: translation_key for translation_key, value in STAINS_OPTIONS.items()
|
||||
},
|
||||
),
|
||||
HomeConnectSelectEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_COMMON_VARIO_PERFECT,
|
||||
translation_key="vario_perfect",
|
||||
|
||||
@@ -524,6 +524,15 @@ set_program_and_options:
|
||||
washer_options:
|
||||
collapsed: true
|
||||
fields:
|
||||
laundry_care_washer_option_rinse_plus:
|
||||
example: laundry_care_washer_enum_type_rinse_plus_off
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
mode: dropdown
|
||||
translation_key: rinse_plus
|
||||
options:
|
||||
- laundry_care_washer_enum_type_rinse_plus_off
|
||||
laundry_care_washer_option_temperature:
|
||||
example: laundry_care_washer_enum_type_temperature_g_c_40
|
||||
required: false
|
||||
@@ -567,6 +576,15 @@ set_program_and_options:
|
||||
- laundry_care_washer_enum_type_spin_speed_ul_low
|
||||
- laundry_care_washer_enum_type_spin_speed_ul_medium
|
||||
- laundry_care_washer_enum_type_spin_speed_ul_high
|
||||
laundry_care_washer_option_stains:
|
||||
example: laundry_care_washer_enum_type_stains_off
|
||||
required: false
|
||||
selector:
|
||||
select:
|
||||
mode: dropdown
|
||||
translation_key: stains
|
||||
options:
|
||||
- laundry_care_washer_enum_type_stains_off
|
||||
b_s_h_common_option_finish_in_relative:
|
||||
example: 3600
|
||||
required: false
|
||||
@@ -576,6 +594,11 @@ set_program_and_options:
|
||||
step: 1
|
||||
mode: box
|
||||
unit_of_measurement: s
|
||||
laundry_care_common_option_silent_mode:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_i_dos1_active:
|
||||
example: false
|
||||
required: false
|
||||
@@ -586,6 +609,41 @@ set_program_and_options:
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_intensive_plus:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_less_ironing:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_mini_load:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_prewash:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_rinse_hold:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_soak:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_water_plus:
|
||||
example: false
|
||||
required: false
|
||||
selector:
|
||||
boolean:
|
||||
laundry_care_washer_option_vario_perfect:
|
||||
example: laundry_care_common_enum_type_vario_perfect_eco_perfect
|
||||
required: false
|
||||
|
||||
@@ -70,6 +70,9 @@
|
||||
"freezer_door": {
|
||||
"name": "Freezer door"
|
||||
},
|
||||
"interior_illumination_active": {
|
||||
"name": "Interior illumination active"
|
||||
},
|
||||
"left_chiller_door": {
|
||||
"name": "Left chiller door"
|
||||
},
|
||||
@@ -359,6 +362,12 @@
|
||||
"b_s_h_common_enum_type_ambient_light_color_custom_color": "Custom"
|
||||
}
|
||||
},
|
||||
"auto_stain": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_stains::name%]",
|
||||
"state": {
|
||||
"laundry_care_washer_enum_type_stains_off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"bean_amount": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::consumer_products_coffee_maker_option_bean_amount::name%]",
|
||||
"state": {
|
||||
@@ -523,6 +532,12 @@
|
||||
"consumer_products_cleaning_robot_enum_type_available_maps_temp_map": "[%key:component::home_connect::selector::available_maps::options::consumer_products_cleaning_robot_enum_type_available_maps_temp_map%]"
|
||||
}
|
||||
},
|
||||
"rinse_plus": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_rinse_plus::name%]",
|
||||
"state": {
|
||||
"laundry_care_washer_enum_type_rinse_plus_off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"selected_program": {
|
||||
"name": "Selected program",
|
||||
"state": {
|
||||
@@ -1212,27 +1227,51 @@
|
||||
"intensiv_zone": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::dishcare_dishwasher_option_intensiv_zone::name%]"
|
||||
},
|
||||
"intensive_plus": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_intensive_plus::name%]"
|
||||
},
|
||||
"less_ironing": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_less_ironing::name%]"
|
||||
},
|
||||
"mini_load": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_mini_load::name%]"
|
||||
},
|
||||
"multiple_beverages": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::consumer_products_coffee_maker_option_multiple_beverages::name%]"
|
||||
},
|
||||
"power": {
|
||||
"name": "Power"
|
||||
},
|
||||
"prewash": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_prewash::name%]"
|
||||
},
|
||||
"refrigerator_super_mode": {
|
||||
"name": "Refrigerator super mode"
|
||||
},
|
||||
"rinse_hold": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_rinse_hold::name%]"
|
||||
},
|
||||
"sabbath_mode": {
|
||||
"name": "Sabbath mode"
|
||||
},
|
||||
"silence_on_demand": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::dishcare_dishwasher_option_silence_on_demand::name%]"
|
||||
},
|
||||
"silent_mode": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_common_option_silent_mode::name%]"
|
||||
},
|
||||
"soaking": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_soak::name%]"
|
||||
},
|
||||
"vacation_mode": {
|
||||
"name": "Vacation mode"
|
||||
},
|
||||
"vario_speed_plus": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::dishcare_dishwasher_option_vario_speed_plus::name%]"
|
||||
},
|
||||
"water_plus": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_water_plus::name%]"
|
||||
},
|
||||
"zeolite_dry": {
|
||||
"name": "[%key:component::home_connect::services::set_program_and_options::fields::dishcare_dishwasher_option_zeolite_dry::name%]"
|
||||
}
|
||||
@@ -1654,6 +1693,11 @@
|
||||
"laundry_care_washer_program_wool": "Wool"
|
||||
}
|
||||
},
|
||||
"rinse_plus": {
|
||||
"options": {
|
||||
"laundry_care_washer_enum_type_rinse_plus_off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"spin_speed": {
|
||||
"options": {
|
||||
"laundry_care_washer_enum_type_spin_speed_off": "[%key:common::state::off%]",
|
||||
@@ -1672,6 +1716,11 @@
|
||||
"laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"stains": {
|
||||
"options": {
|
||||
"laundry_care_washer_enum_type_stains_off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"suction_power": {
|
||||
"options": {
|
||||
"consumer_products_cleaning_robot_enum_type_suction_power_max": "Max",
|
||||
@@ -1865,6 +1914,10 @@
|
||||
"description": "Setting to adjust the venting level of the air conditioner as a percentage.",
|
||||
"name": "Fan speed percentage"
|
||||
},
|
||||
"laundry_care_common_option_silent_mode": {
|
||||
"description": "Defines if the silent mode is activated.",
|
||||
"name": "Silent mode"
|
||||
},
|
||||
"laundry_care_dryer_option_drying_target": {
|
||||
"description": "Describes the drying target for a dryer program.",
|
||||
"name": "Drying target"
|
||||
@@ -1877,10 +1930,42 @@
|
||||
"description": "Defines if the detergent feed is activated / deactivated. (i-Dos content 2)",
|
||||
"name": "i-Dos 2 Active"
|
||||
},
|
||||
"laundry_care_washer_option_intensive_plus": {
|
||||
"description": "Defines if the intensive washing is enabled for heavily soiled laundry.",
|
||||
"name": "Intensive +"
|
||||
},
|
||||
"laundry_care_washer_option_less_ironing": {
|
||||
"description": "Defines if the laundry is treated gently to reduce creasing and make ironing easier.",
|
||||
"name": "Less ironing"
|
||||
},
|
||||
"laundry_care_washer_option_mini_load": {
|
||||
"description": "Defines if the mini load option is activated.",
|
||||
"name": "Mini load"
|
||||
},
|
||||
"laundry_care_washer_option_prewash": {
|
||||
"description": "Defines if an additional prewash cycle is added to the program.",
|
||||
"name": "Prewash"
|
||||
},
|
||||
"laundry_care_washer_option_rinse_hold": {
|
||||
"description": "Defines if the rinse hold option is activated.",
|
||||
"name": "Rinse hold"
|
||||
},
|
||||
"laundry_care_washer_option_rinse_plus": {
|
||||
"description": "Defines if an additional rinse cycle is added to the program.",
|
||||
"name": "Extra rinse"
|
||||
},
|
||||
"laundry_care_washer_option_soak": {
|
||||
"description": "Defines if the soaking is activated.",
|
||||
"name": "Soaking"
|
||||
},
|
||||
"laundry_care_washer_option_spin_speed": {
|
||||
"description": "Defines the spin speed of a washer program.",
|
||||
"name": "Spin speed"
|
||||
},
|
||||
"laundry_care_washer_option_stains": {
|
||||
"description": "Defines the type of stains to be treated.",
|
||||
"name": "Auto stain"
|
||||
},
|
||||
"laundry_care_washer_option_temperature": {
|
||||
"description": "Defines the temperature of the washing program.",
|
||||
"name": "Temperature"
|
||||
@@ -1889,6 +1974,10 @@
|
||||
"description": "Defines if a cycle saves energy (Eco Perfect) or time (Speed Perfect).",
|
||||
"name": "Vario perfect"
|
||||
},
|
||||
"laundry_care_washer_option_water_plus": {
|
||||
"description": "Defines if the water plus option is activated.",
|
||||
"name": "Water +"
|
||||
},
|
||||
"program": {
|
||||
"description": "Program to select",
|
||||
"name": "Program"
|
||||
|
||||
@@ -124,6 +124,10 @@ SWITCH_OPTIONS = (
|
||||
key=OptionKey.COOKING_OVEN_FAST_PRE_HEAT,
|
||||
translation_key="fast_pre_heat",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_COMMON_SILENT_MODE,
|
||||
translation_key="silent_mode",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE,
|
||||
translation_key="i_dos1_active",
|
||||
@@ -132,6 +136,34 @@ SWITCH_OPTIONS = (
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE,
|
||||
translation_key="i_dos2_active",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_INTENSIVE_PLUS,
|
||||
translation_key="intensive_plus",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_LESS_IRONING,
|
||||
translation_key="less_ironing",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_MINI_LOAD,
|
||||
translation_key="mini_load",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_PREWASH,
|
||||
translation_key="prewash",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_RINSE_HOLD,
|
||||
translation_key="rinse_hold",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_SOAK,
|
||||
translation_key="soaking",
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key=OptionKey.LAUNDRY_CARE_WASHER_WATER_PLUS,
|
||||
translation_key="water_plus",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ from .entity import HomeWizardEntity
|
||||
def homewizard_exception_handler[_HomeWizardEntityT: HomeWizardEntity, **_P](
|
||||
func: Callable[Concatenate[_HomeWizardEntityT, _P], Coroutine[Any, Any, Any]],
|
||||
) -> Callable[Concatenate[_HomeWizardEntityT, _P], Coroutine[Any, Any, None]]:
|
||||
"""Decorate HomeWizard Energy calls to handle HomeWizardEnergy exceptions.
|
||||
"""Decorate HomeWizard calls to handle HomeWizardEnergy exceptions.
|
||||
|
||||
A decorator that wraps the passed in function, catches HomeWizardEnergy errors,
|
||||
and reloads the integration when the API was disabled so the reauth flow is
|
||||
|
||||
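As a usage illustration, here is a minimal, hedged sketch of how the renamed decorator is applied to an entity command. It mirrors the select entity further down in this diff rather than adding new behavior; the class name is an assumption for the sketch.

# Hedged sketch only: shows homewizard_exception_handler wrapping an entity
# method, following the pattern used by the select entity in this changeset.
from homewizard_energy.models import Batteries

from homeassistant.components.homewizard.entity import HomeWizardEntity
from homeassistant.components.homewizard.helpers import homewizard_exception_handler


class ExampleBatteryModeSelect(HomeWizardEntity):
    """Illustrative entity; the name is an assumption, not part of this diff."""

    @homewizard_exception_handler
    async def async_select_option(self, option: str) -> None:
        """Change the selected option; client errors are handled by the decorator."""
        await self.coordinator.api.batteries(Batteries.Mode(option))
        await self.coordinator.async_request_refresh()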
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "homewizard",
|
||||
"name": "HomeWizard Energy",
|
||||
"name": "HomeWizard",
|
||||
"codeowners": ["@DCSBL"],
|
||||
"config_flow": true,
|
||||
"dhcp": [
|
||||
@@ -13,6 +13,6 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["homewizard_energy"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["python-homewizard-energy==9.3.0"],
|
||||
"requirements": ["python-homewizard-energy==10.0.0"],
|
||||
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -2,12 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from homewizard_energy import HomeWizardEnergy
|
||||
from homewizard_energy.models import Batteries, CombinedModels as DeviceResponseEntry
|
||||
from homewizard_energy.models import Batteries
|
||||
|
||||
from homeassistant.components.select import SelectEntity, SelectEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
@@ -21,69 +16,59 @@ from .helpers import homewizard_exception_handler
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class HomeWizardSelectEntityDescription(SelectEntityDescription):
|
||||
"""Class describing HomeWizard select entities."""
|
||||
|
||||
available_fn: Callable[[DeviceResponseEntry], bool]
|
||||
create_fn: Callable[[DeviceResponseEntry], bool]
|
||||
current_fn: Callable[[DeviceResponseEntry], str | None]
|
||||
set_fn: Callable[[HomeWizardEnergy, str], Awaitable[Any]]
|
||||
|
||||
|
||||
DESCRIPTIONS = [
|
||||
HomeWizardSelectEntityDescription(
|
||||
key="battery_group_mode",
|
||||
translation_key="battery_group_mode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
options=[Batteries.Mode.ZERO, Batteries.Mode.STANDBY, Batteries.Mode.TO_FULL],
|
||||
available_fn=lambda x: x.batteries is not None,
|
||||
create_fn=lambda x: x.batteries is not None,
|
||||
current_fn=lambda x: x.batteries.mode if x.batteries else None,
|
||||
set_fn=lambda api, mode: api.batteries(mode=Batteries.Mode(mode)),
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: HomeWizardConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up HomeWizard select based on a config entry."""
|
||||
async_add_entities(
|
||||
HomeWizardSelectEntity(
|
||||
coordinator=entry.runtime_data,
|
||||
description=description,
|
||||
if entry.runtime_data.data.device.supports_batteries():
|
||||
async_add_entities(
|
||||
[
|
||||
HomeWizardBatteryModeSelectEntity(
|
||||
coordinator=entry.runtime_data,
|
||||
)
|
||||
]
|
||||
)
|
||||
for description in DESCRIPTIONS
|
||||
if description.create_fn(entry.runtime_data.data)
|
||||
)
|
||||
|
||||
|
||||
class HomeWizardSelectEntity(HomeWizardEntity, SelectEntity):
|
||||
class HomeWizardBatteryModeSelectEntity(HomeWizardEntity, SelectEntity):
|
||||
"""Defines a HomeWizard select entity."""
|
||||
|
||||
entity_description: HomeWizardSelectEntityDescription
|
||||
entity_description: SelectEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HWEnergyDeviceUpdateCoordinator,
|
||||
description: HomeWizardSelectEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
description = SelectEntityDescription(
|
||||
key="battery_group_mode",
|
||||
translation_key="battery_group_mode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
options=[
|
||||
str(mode)
|
||||
for mode in (coordinator.data.device.supported_battery_modes() or [])
|
||||
],
|
||||
)
|
||||
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def current_option(self) -> str | None:
|
||||
"""Return the selected entity option to represent the entity state."""
|
||||
return self.entity_description.current_fn(self.coordinator.data)
|
||||
return (
|
||||
self.coordinator.data.batteries.mode
|
||||
if self.coordinator.data.batteries and self.coordinator.data.batteries.mode
|
||||
else None
|
||||
)
|
||||
|
||||
@homewizard_exception_handler
|
||||
async def async_select_option(self, option: str) -> None:
|
||||
"""Change the selected option."""
|
||||
await self.entity_description.set_fn(self.coordinator.api, option)
|
||||
await self.coordinator.api.batteries(Batteries.Mode(option))
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@@ -12,13 +12,13 @@
|
||||
"wrong_device": "The configured device is not the same found on this IP address."
|
||||
},
|
||||
"error": {
|
||||
"api_not_enabled": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings.",
|
||||
"api_not_enabled": "The local API is disabled. Go to the HomeWizard app and enable the API in the device settings.",
|
||||
"authorization_failed": "Failed to authorize, make sure to press the button of the device within 30 seconds",
|
||||
"network_error": "Device unreachable, make sure that you have entered the correct IP address and that the device is available in your network"
|
||||
},
|
||||
"step": {
|
||||
"authorize": {
|
||||
"description": "Press the button on the HomeWizard Energy device for two seconds, then select the button below.",
|
||||
"description": "Press the button on the HomeWizard device for two seconds, then select the button below.",
|
||||
"title": "Authorize"
|
||||
},
|
||||
"discovery_confirm": {
|
||||
@@ -30,7 +30,7 @@
|
||||
"title": "Re-authenticate"
|
||||
},
|
||||
"reauth_enable_api": {
|
||||
"description": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings."
|
||||
"description": "The local API is disabled. Go to the HomeWizard app and enable the API in the device settings."
|
||||
},
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
@@ -46,9 +46,9 @@
|
||||
"ip_address": "[%key:common::config_flow::data::ip%]"
|
||||
},
|
||||
"data_description": {
|
||||
"ip_address": "The IP address of your HomeWizard Energy device."
|
||||
"ip_address": "The IP address of your HomeWizard device."
|
||||
},
|
||||
"description": "Enter the IP address of your HomeWizard Energy device to integrate with Home Assistant.",
|
||||
"description": "Enter the IP address of your HomeWizard device to integrate with Home Assistant.",
|
||||
"title": "Configure device"
|
||||
}
|
||||
}
|
||||
@@ -65,7 +65,9 @@
|
||||
"state": {
|
||||
"standby": "Standby",
|
||||
"to_full": "Manual charge mode",
|
||||
"zero": "Zero mode"
|
||||
"zero": "Zero mode",
|
||||
"zero_charge_only": "Zero mode (charge only)",
|
||||
"zero_discharge_only": "Zero mode (discharge only)"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -172,7 +174,7 @@
|
||||
"message": "The local API is unauthorized. Restore API access by following the instructions in the repair issue."
|
||||
},
|
||||
"communication_error": {
|
||||
"message": "An error occurred while communicating with your HomeWizard Energy device"
|
||||
"message": "An error occurred while communicating with your HomeWizard device"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Creates HomeWizard Energy switch entities."""
|
||||
"""Creates HomeWizard switch entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@dennisschroer"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/huisbaasje",
|
||||
"integration_type": "device",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["energyflip"],
|
||||
"requirements": ["energyflip-client==0.2.2"]
|
||||
|
||||
@@ -48,5 +48,19 @@
|
||||
"turn_on": {
|
||||
"service": "mdi:air-humidifier"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"started_drying": {
|
||||
"trigger": "mdi:arrow-down-bold"
|
||||
},
|
||||
"started_humidifying": {
|
||||
"trigger": "mdi:arrow-up-bold"
|
||||
},
|
||||
"turned_off": {
|
||||
"trigger": "mdi:air-humidifier-off"
|
||||
},
|
||||
"turned_on": {
|
||||
"trigger": "mdi:air-humidifier-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted humidifiers to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"set_humidity": "Set humidity for {entity_name}",
|
||||
@@ -86,6 +90,15 @@
|
||||
"message": "Provided humidity {humidity} is not valid. Accepted range is {min_humidity} to {max_humidity}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_humidity": {
|
||||
"description": "Sets the target humidity.",
|
||||
@@ -120,5 +133,47 @@
|
||||
"name": "[%key:common::action::turn_on%]"
|
||||
}
|
||||
},
|
||||
"title": "Humidifier"
|
||||
"title": "Humidifier",
|
||||
"triggers": {
|
||||
"started_drying": {
|
||||
"description": "Triggers after one or more humidifiers start drying.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::humidifier::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::humidifier::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Humidifier started drying"
|
||||
},
|
||||
"started_humidifying": {
|
||||
"description": "Triggers after one or more humidifiers start humidifying.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::humidifier::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::humidifier::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Humidifier started humidifying"
|
||||
},
|
||||
"turned_off": {
|
||||
"description": "Triggers after one or more humidifiers turn off.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::humidifier::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::humidifier::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Humidifier turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers after one or more humidifiers turn on.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::humidifier::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::humidifier::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Humidifier turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/humidifier/trigger.py (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
"""Provides triggers for humidifiers."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
make_entity_target_state_attribute_trigger,
|
||||
make_entity_target_state_trigger,
|
||||
)
|
||||
|
||||
from .const import ATTR_ACTION, DOMAIN, HumidifierAction
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"started_drying": make_entity_target_state_attribute_trigger(
|
||||
DOMAIN, ATTR_ACTION, HumidifierAction.DRYING
|
||||
),
|
||||
"started_humidifying": make_entity_target_state_attribute_trigger(
|
||||
DOMAIN, ATTR_ACTION, HumidifierAction.HUMIDIFYING
|
||||
),
|
||||
"turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for humidifiers."""
|
||||
return TRIGGERS
|
||||
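For orientation, a hedged sketch of the same registry pattern applied to a hypothetical domain. Only the helper factories imported above are taken from this changeset; the domain name and module are illustrative assumptions.

# Illustrative only: the shape of a TRIGGERS registry for a made-up domain,
# reusing the factories shown above; not part of this changeset.
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger

DOMAIN = "exampledomain"  # hypothetical domain for demonstration

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
    "turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for the hypothetical domain."""
    return TRIGGERS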
homeassistant/components/humidifier/triggers.yaml (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: humidifier
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
|
||||
started_drying: *trigger_common
|
||||
started_humidifying: *trigger_common
|
||||
turned_on: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@dermotduffy"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/hyperion",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["hyperion"],
|
||||
"requirements": ["hyperion-py==0.7.6"],
|
||||
|
||||
@@ -108,7 +108,7 @@ class IcloudAccount:
|
||||
|
||||
if self.api.requires_2fa:
|
||||
# Trigger a new log in to ensure the user enters the 2FA code again.
|
||||
raise PyiCloudFailedLoginException # noqa: TRY301
|
||||
raise PyiCloudFailedLoginException("2FA Required") # noqa: TRY301
|
||||
|
||||
except PyiCloudFailedLoginException:
|
||||
self.api = None
|
||||
|
||||
@@ -16,7 +16,7 @@ from pyicloud.exceptions import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.storage import Store
|
||||
|
||||
@@ -155,8 +155,8 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
CONF_GPS_ACCURACY_THRESHOLD: self._gps_accuracy_threshold,
|
||||
}
|
||||
|
||||
# If this is a password update attempt, update the entry instead of creating one
|
||||
if step_id == "user":
|
||||
# If this is a password update attempt, don't try to create a new entry
|
||||
if self.source == SOURCE_USER:
|
||||
return self.async_create_entry(title=self._username, data=data)
|
||||
|
||||
entry = await self.async_set_unique_id(self.unique_id)
|
||||
|
||||
@@ -261,7 +261,8 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if self._can_identify is None:
|
||||
try:
|
||||
self._can_identify = await self._try_call(device.can_identify())
|
||||
await self._try_call(device.ensure_connected())
|
||||
self._can_identify = device.can_identify
|
||||
except AbortFlow as err:
|
||||
return self.async_abort(reason=err.reason)
|
||||
if self._can_identify:
|
||||
|
||||
@@ -13,5 +13,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/improv_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["py-improv-ble-client==1.0.3"]
|
||||
"requirements": ["py-improv-ble-client==2.0.1"]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
{ "registered_devices": true }
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/incomfort",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["incomfortclient"],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@dgomes"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/kmtronic",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pykmtronic"],
|
||||
"requirements": ["pykmtronic==0.3.0"]
|
||||
|
||||
@@ -94,6 +94,8 @@ SERVICE_KNX_EVENT_REGISTER: Final = "event_register"
|
||||
SERVICE_KNX_EXPOSURE_REGISTER: Final = "exposure_register"
|
||||
SERVICE_KNX_READ: Final = "read"
|
||||
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY: Final = "data_secure_group_key_issue"
|
||||
|
||||
|
||||
class KNXConfigEntryData(TypedDict, total=False):
|
||||
"""Config entry for the KNX integration."""
|
||||
@@ -163,8 +165,10 @@ SUPPORTED_PLATFORMS_UI: Final = {
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DATE,
|
||||
Platform.FAN,
|
||||
Platform.DATETIME,
|
||||
Platform.LIGHT,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.TIME,
|
||||
}
|
||||
@@ -217,3 +221,9 @@ class ClimateConf:
|
||||
FAN_MAX_STEP: Final = "fan_max_step"
|
||||
FAN_SPEED_MODE: Final = "fan_speed_mode"
|
||||
FAN_ZERO_MODE: Final = "fan_zero_mode"
|
||||
|
||||
|
||||
class FanConf:
|
||||
"""Common config keys for fan."""
|
||||
|
||||
MAX_STEP: Final = "max_step"
|
||||
|
||||
homeassistant/components/knx/dpt.py (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
"""KNX DPT serializer."""
|
||||
|
||||
from collections.abc import Mapping
|
||||
from functools import cache
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
from xknx.dpt import DPTBase, DPTComplex, DPTEnum, DPTNumeric
|
||||
from xknx.dpt.dpt_16 import DPTString
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
|
||||
|
||||
HaDptClass = Literal["numeric", "enum", "complex", "string"]
|
||||
|
||||
|
||||
class DPTInfo(TypedDict):
|
||||
"""DPT information."""
|
||||
|
||||
dpt_class: HaDptClass
|
||||
main: int
|
||||
sub: int | None
|
||||
name: str | None
|
||||
unit: str | None
|
||||
sensor_device_class: SensorDeviceClass | None
|
||||
sensor_state_class: SensorStateClass | None
|
||||
|
||||
|
||||
@cache
|
||||
def get_supported_dpts() -> Mapping[str, DPTInfo]:
|
||||
"""Return a mapping of supported DPTs with HA specific attributes."""
|
||||
dpts = {}
|
||||
for dpt_class in DPTBase.dpt_class_tree():
|
||||
dpt_number_str = dpt_class.dpt_number_str()
|
||||
ha_dpt_class = _ha_dpt_class(dpt_class)
|
||||
dpts[dpt_number_str] = DPTInfo(
|
||||
dpt_class=ha_dpt_class,
|
||||
main=dpt_class.dpt_main_number, # type: ignore[typeddict-item] # checked in xknx unit tests
|
||||
sub=dpt_class.dpt_sub_number,
|
||||
name=dpt_class.value_type,
|
||||
unit=dpt_class.unit,
|
||||
sensor_device_class=_sensor_device_classes.get(dpt_number_str),
|
||||
sensor_state_class=_get_sensor_state_class(ha_dpt_class, dpt_number_str),
|
||||
)
|
||||
return dpts
|
||||
|
||||
|
||||
def _ha_dpt_class(dpt_cls: type[DPTBase]) -> HaDptClass:
|
||||
"""Return the DPT class identifier string."""
|
||||
if issubclass(dpt_cls, DPTNumeric):
|
||||
return "numeric"
|
||||
if issubclass(dpt_cls, DPTEnum):
|
||||
return "enum"
|
||||
if issubclass(dpt_cls, DPTComplex):
|
||||
return "complex"
|
||||
if issubclass(dpt_cls, DPTString):
|
||||
return "string"
|
||||
raise ValueError("Unsupported DPT class")
|
||||
|
||||
|
||||
_sensor_device_classes: Mapping[str, SensorDeviceClass] = {
|
||||
"7.011": SensorDeviceClass.DISTANCE,
|
||||
"7.012": SensorDeviceClass.CURRENT,
|
||||
"7.013": SensorDeviceClass.ILLUMINANCE,
|
||||
"8.012": SensorDeviceClass.DISTANCE,
|
||||
"9.001": SensorDeviceClass.TEMPERATURE,
|
||||
"9.002": SensorDeviceClass.TEMPERATURE_DELTA,
|
||||
"9.004": SensorDeviceClass.ILLUMINANCE,
|
||||
"9.005": SensorDeviceClass.WIND_SPEED,
|
||||
"9.006": SensorDeviceClass.PRESSURE,
|
||||
"9.007": SensorDeviceClass.HUMIDITY,
|
||||
"9.020": SensorDeviceClass.VOLTAGE,
|
||||
"9.021": SensorDeviceClass.CURRENT,
|
||||
"9.024": SensorDeviceClass.POWER,
|
||||
"9.025": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"9.027": SensorDeviceClass.TEMPERATURE,
|
||||
"9.028": SensorDeviceClass.WIND_SPEED,
|
||||
"9.029": SensorDeviceClass.ABSOLUTE_HUMIDITY,
|
||||
"12.1200": SensorDeviceClass.VOLUME,
|
||||
"12.1201": SensorDeviceClass.VOLUME,
|
||||
"13.002": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"13.010": SensorDeviceClass.ENERGY,
|
||||
"13.012": SensorDeviceClass.REACTIVE_ENERGY,
|
||||
"13.013": SensorDeviceClass.ENERGY,
|
||||
"13.015": SensorDeviceClass.REACTIVE_ENERGY,
|
||||
"13.016": SensorDeviceClass.ENERGY,
|
||||
"13.1200": SensorDeviceClass.VOLUME,
|
||||
"13.1201": SensorDeviceClass.VOLUME,
|
||||
"14.010": SensorDeviceClass.AREA,
|
||||
"14.019": SensorDeviceClass.CURRENT,
|
||||
"14.027": SensorDeviceClass.VOLTAGE,
|
||||
"14.028": SensorDeviceClass.VOLTAGE,
|
||||
"14.030": SensorDeviceClass.VOLTAGE,
|
||||
"14.031": SensorDeviceClass.ENERGY,
|
||||
"14.033": SensorDeviceClass.FREQUENCY,
|
||||
"14.037": SensorDeviceClass.ENERGY_STORAGE,
|
||||
"14.039": SensorDeviceClass.DISTANCE,
|
||||
"14.051": SensorDeviceClass.WEIGHT,
|
||||
"14.056": SensorDeviceClass.POWER,
|
||||
"14.057": SensorDeviceClass.POWER_FACTOR,
|
||||
"14.058": SensorDeviceClass.PRESSURE,
|
||||
"14.065": SensorDeviceClass.SPEED,
|
||||
"14.068": SensorDeviceClass.TEMPERATURE,
|
||||
"14.069": SensorDeviceClass.TEMPERATURE,
|
||||
"14.070": SensorDeviceClass.TEMPERATURE_DELTA,
|
||||
"14.076": SensorDeviceClass.VOLUME,
|
||||
"14.077": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"14.080": SensorDeviceClass.APPARENT_POWER,
|
||||
"14.1200": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"14.1201": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"29.010": SensorDeviceClass.ENERGY,
|
||||
"29.012": SensorDeviceClass.REACTIVE_ENERGY,
|
||||
}
|
||||
|
||||
_sensor_state_class_overrides: Mapping[str, SensorStateClass | None] = {
|
||||
"5.003": SensorStateClass.MEASUREMENT_ANGLE, # DPTAngle
|
||||
"5.006": None, # DPTTariff
|
||||
"7.010": None, # DPTPropDataType
|
||||
"8.011": SensorStateClass.MEASUREMENT_ANGLE, # DPTRotationAngle
|
||||
"9.026": SensorStateClass.TOTAL_INCREASING, # DPTRainAmount
|
||||
"12.1200": SensorStateClass.TOTAL, # DPTVolumeLiquidLitre
|
||||
"12.1201": SensorStateClass.TOTAL, # DPTVolumeM3
|
||||
"13.010": SensorStateClass.TOTAL, # DPTActiveEnergy
|
||||
"13.011": SensorStateClass.TOTAL, # DPTApparantEnergy
|
||||
"13.012": SensorStateClass.TOTAL, # DPTReactiveEnergy
|
||||
"14.007": SensorStateClass.MEASUREMENT_ANGLE, # DPTAngleDeg
|
||||
"14.037": SensorStateClass.TOTAL, # DPTHeatQuantity
|
||||
"14.051": SensorStateClass.TOTAL, # DPTMass
|
||||
"14.055": SensorStateClass.MEASUREMENT_ANGLE, # DPTPhaseAngleDeg
|
||||
"14.031": SensorStateClass.TOTAL_INCREASING, # DPTEnergy
|
||||
"17.001": None, # DPTSceneNumber
|
||||
"29.010": SensorStateClass.TOTAL, # DPTActiveEnergy8Byte
|
||||
"29.011": SensorStateClass.TOTAL, # DPTApparantEnergy8Byte
|
||||
"29.012": SensorStateClass.TOTAL, # DPTReactiveEnergy8Byte
|
||||
}
|
||||
|
||||
|
||||
def _get_sensor_state_class(
|
||||
ha_dpt_class: HaDptClass, dpt_number_str: str
|
||||
) -> SensorStateClass | None:
|
||||
"""Return the SensorStateClass for a given DPT."""
|
||||
if ha_dpt_class != "numeric":
|
||||
return None
|
||||
|
||||
return _sensor_state_class_overrides.get(
|
||||
dpt_number_str,
|
||||
SensorStateClass.MEASUREMENT,
|
||||
)
|
||||
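A small, hedged sketch of how this helper's output can be consumed, mirroring the lookup that the UI sensor entity later in this diff performs. The chosen DPT key "9.001" is only an example; the commented results follow the mapping tables above.

# Illustrative consumer of get_supported_dpts(); fields follow the DPTInfo
# TypedDict defined above.
from homeassistant.components.knx.dpt import get_supported_dpts

dpts = get_supported_dpts()
info = dpts["9.001"]  # 2-byte float temperature, per _sensor_device_classes above
print(info["name"], info["unit"])       # value type name and unit, e.g. temperature / °C
print(info["sensor_device_class"])      # SensorDeviceClass.TEMPERATURE
print(info["sensor_state_class"])       # SensorStateClass.MEASUREMENT (no override for 9.001)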
@@ -77,6 +77,11 @@ class _KnxEntityBase(Entity):
|
||||
"""Store register state change callback and start device object."""
|
||||
self._device.register_device_updated_cb(self.after_update_callback)
|
||||
self._device.xknx.devices.async_add(self._device)
|
||||
if uid := self.unique_id:
|
||||
self._knx_module.add_to_group_address_entities(
|
||||
group_addresses=self._device.group_addresses(),
|
||||
identifier=(self.platform_data.domain, uid),
|
||||
)
|
||||
# super call needed to have methods of multi-inherited classes called
|
||||
# eg. for restoring state (like _KNXSwitch)
|
||||
await super().async_added_to_hass()
|
||||
@@ -85,6 +90,11 @@ class _KnxEntityBase(Entity):
|
||||
"""Disconnect device object when removed."""
|
||||
self._device.unregister_device_updated_cb(self.after_update_callback)
|
||||
self._device.xknx.devices.async_remove(self._device)
|
||||
if uid := self.unique_id:
|
||||
self._knx_module.remove_from_group_address_entities(
|
||||
group_addresses=self._device.group_addresses(),
|
||||
identifier=(self.platform_data.domain, uid),
|
||||
)
|
||||
|
||||
|
||||
class KnxYamlEntity(_KnxEntityBase):
|
||||
|
||||
@@ -3,15 +3,19 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
from typing import Any, Final
|
||||
from typing import Any
|
||||
|
||||
from propcache.api import cached_property
|
||||
from xknx.devices import Fan as XknxFan
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.fan import FanEntity, FanEntityFeature
|
||||
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.percentage import (
|
||||
percentage_to_ranged_value,
|
||||
@@ -19,12 +23,19 @@ from homeassistant.util.percentage import (
|
||||
)
|
||||
from homeassistant.util.scaling import int_states_in_range
|
||||
|
||||
from .const import KNX_ADDRESS, KNX_MODULE_KEY
|
||||
from .entity import KnxYamlEntity
|
||||
from .const import CONF_SYNC_STATE, DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, FanConf
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .knx_module import KNXModule
|
||||
from .schema import FanSchema
|
||||
|
||||
DEFAULT_PERCENTAGE: Final = 50
|
||||
from .storage.const import (
|
||||
CONF_ENTITY,
|
||||
CONF_GA_OSCILLATION,
|
||||
CONF_GA_SPEED,
|
||||
CONF_GA_STEP,
|
||||
CONF_GA_SWITCH,
|
||||
CONF_SPEED,
|
||||
)
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -34,61 +45,55 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up fan(s) for KNX platform."""
|
||||
knx_module = hass.data[KNX_MODULE_KEY]
|
||||
config: list[ConfigType] = knx_module.config_yaml[Platform.FAN]
|
||||
platform = async_get_current_platform()
|
||||
knx_module.config_store.add_platform(
|
||||
platform=Platform.FAN,
|
||||
controller=KnxUiEntityPlatformController(
|
||||
knx_module=knx_module,
|
||||
entity_platform=platform,
|
||||
entity_class=KnxUiFan,
|
||||
),
|
||||
)
|
||||
|
||||
async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config)
|
||||
entities: list[_KnxFan] = []
|
||||
if yaml_platform_config := knx_module.config_yaml.get(Platform.FAN):
|
||||
entities.extend(
|
||||
KnxYamlFan(knx_module, entity_config)
|
||||
for entity_config in yaml_platform_config
|
||||
)
|
||||
if ui_config := knx_module.config_store.data["entities"].get(Platform.FAN):
|
||||
entities.extend(
|
||||
KnxUiFan(knx_module, unique_id, config)
|
||||
for unique_id, config in ui_config.items()
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class KNXFan(KnxYamlEntity, FanEntity):
|
||||
class _KnxFan(FanEntity):
|
||||
"""Representation of a KNX fan."""
|
||||
|
||||
_device: XknxFan
|
||||
_step_range: tuple[int, int] | None
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize of KNX fan."""
|
||||
max_step = config.get(FanSchema.CONF_MAX_STEP)
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxFan(
|
||||
xknx=knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
group_address_speed=config.get(KNX_ADDRESS),
|
||||
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
|
||||
group_address_oscillation=config.get(
|
||||
FanSchema.CONF_OSCILLATION_ADDRESS
|
||||
),
|
||||
group_address_oscillation_state=config.get(
|
||||
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
|
||||
),
|
||||
max_step=max_step,
|
||||
),
|
||||
)
|
||||
# FanSpeedMode.STEP if max_step is set
|
||||
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
|
||||
self._attr_unique_id = str(self._device.speed.group_address)
|
||||
def _get_knx_speed(self, percentage: int) -> int:
|
||||
"""Convert percentage to KNX speed value."""
|
||||
if self._step_range is not None:
|
||||
return math.ceil(percentage_to_ranged_value(self._step_range, percentage))
|
||||
return percentage
|
||||
|
||||
async def async_set_percentage(self, percentage: int) -> None:
|
||||
"""Set the speed of the fan, as a percentage."""
|
||||
if self._step_range:
|
||||
step = math.ceil(percentage_to_ranged_value(self._step_range, percentage))
|
||||
await self._device.set_speed(step)
|
||||
else:
|
||||
await self._device.set_speed(percentage)
|
||||
await self._device.set_speed(self._get_knx_speed(percentage))
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def supported_features(self) -> FanEntityFeature:
|
||||
"""Flag supported features."""
|
||||
flags = (
|
||||
FanEntityFeature.SET_SPEED
|
||||
| FanEntityFeature.TURN_ON
|
||||
| FanEntityFeature.TURN_OFF
|
||||
)
|
||||
|
||||
flags = FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF
|
||||
if self._device.speed.initialized:
|
||||
flags |= FanEntityFeature.SET_SPEED
|
||||
if self._device.supports_oscillation:
|
||||
flags |= FanEntityFeature.OSCILLATE
|
||||
|
||||
return flags
|
||||
|
||||
@property
|
||||
@@ -103,13 +108,18 @@ class KNXFan(KnxYamlEntity, FanEntity):
|
||||
)
|
||||
return self._device.current_speed
|
||||
|
||||
@property
|
||||
@cached_property
|
||||
def speed_count(self) -> int:
|
||||
"""Return the number of speeds the fan supports."""
|
||||
if self._step_range is None:
|
||||
return super().speed_count
|
||||
return int_states_in_range(self._step_range)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the current fan state of the device."""
|
||||
return self._device.is_on
|
||||
|
||||
async def async_turn_on(
|
||||
self,
|
||||
percentage: int | None = None,
|
||||
@@ -117,14 +127,12 @@ class KNXFan(KnxYamlEntity, FanEntity):
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Turn on the fan."""
|
||||
if percentage is None:
|
||||
await self.async_set_percentage(DEFAULT_PERCENTAGE)
|
||||
else:
|
||||
await self.async_set_percentage(percentage)
|
||||
speed = self._get_knx_speed(percentage) if percentage is not None else None
|
||||
await self._device.turn_on(speed)
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the fan off."""
|
||||
await self.async_set_percentage(0)
|
||||
await self._device.turn_off()
|
||||
|
||||
async def async_oscillate(self, oscillating: bool) -> None:
|
||||
"""Oscillate the fan."""
|
||||
@@ -134,3 +142,83 @@ class KNXFan(KnxYamlEntity, FanEntity):
|
||||
def oscillating(self) -> bool | None:
|
||||
"""Return whether or not the fan is currently oscillating."""
|
||||
return self._device.current_oscillation
|
||||
|
||||
|
||||
class KnxYamlFan(_KnxFan, KnxYamlEntity):
|
||||
"""Representation of a KNX fan configured from YAML."""
|
||||
|
||||
_device: XknxFan
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize of KNX fan."""
|
||||
max_step = config.get(FanConf.MAX_STEP)
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxFan(
|
||||
xknx=knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
group_address_speed=config.get(KNX_ADDRESS),
|
||||
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
|
||||
group_address_oscillation=config.get(
|
||||
FanSchema.CONF_OSCILLATION_ADDRESS
|
||||
),
|
||||
group_address_oscillation_state=config.get(
|
||||
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
|
||||
),
|
||||
group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
|
||||
group_address_switch_state=config.get(
|
||||
FanSchema.CONF_SWITCH_STATE_ADDRESS
|
||||
),
|
||||
max_step=max_step,
|
||||
sync_state=config.get(CONF_SYNC_STATE, True),
|
||||
),
|
||||
)
|
||||
# FanSpeedMode.STEP if max_step is set
|
||||
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
|
||||
self._attr_unique_id = str(self._device.speed.group_address)
|
||||
|
||||
|
||||
class KnxUiFan(_KnxFan, KnxUiEntity):
|
||||
"""Representation of a KNX fan configured from UI."""
|
||||
|
||||
_device: XknxFan
|
||||
|
||||
def __init__(
|
||||
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize of KNX fan."""
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
# max_step is required for step mode, thus can be used to differentiate modes
|
||||
max_step: int | None = knx_conf.get(CONF_SPEED, FanConf.MAX_STEP)
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
if max_step:
|
||||
# step control
|
||||
speed_write = knx_conf.get_write(CONF_SPEED, CONF_GA_STEP)
|
||||
speed_state = knx_conf.get_state_and_passive(CONF_SPEED, CONF_GA_STEP)
|
||||
else:
|
||||
# percentage control
|
||||
speed_write = knx_conf.get_write(CONF_SPEED, CONF_GA_SPEED)
|
||||
speed_state = knx_conf.get_state_and_passive(CONF_SPEED, CONF_GA_SPEED)
|
||||
|
||||
self._device = XknxFan(
|
||||
xknx=knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
group_address_speed=speed_write,
|
||||
group_address_speed_state=speed_state,
|
||||
group_address_oscillation=knx_conf.get_write(CONF_GA_OSCILLATION),
|
||||
group_address_oscillation_state=knx_conf.get_state_and_passive(
|
||||
CONF_GA_OSCILLATION
|
||||
),
|
||||
group_address_switch=knx_conf.get_write(CONF_GA_SWITCH),
|
||||
group_address_switch_state=knx_conf.get_state_and_passive(CONF_GA_SWITCH),
|
||||
max_step=max_step,
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
)
|
||||
# FanSpeedMode.STEP if max_step is set
|
||||
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
|
||||
|
||||
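To make the step conversion above concrete, a hedged worked example of the percentage helpers for a fan configured with max_step. The numbers assume the standard Home Assistant percentage/scaling utilities imported in fan.py; max_step=3 is an arbitrary illustration.

# Hedged worked example of the step <-> percentage conversion used by the
# fan entities above.
import math

from homeassistant.util.percentage import percentage_to_ranged_value
from homeassistant.util.scaling import int_states_in_range

step_range = (1, 3)  # _step_range for a fan configured with max_step=3

print(int_states_in_range(step_range))                         # 3 -> speed_count
print(math.ceil(percentage_to_ranged_value(step_range, 50)))   # 2
print(math.ceil(percentage_to_ranged_value(step_range, 100)))  # 3
print(math.ceil(percentage_to_ranged_value(step_range, 1)))    # 1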
@@ -56,6 +56,7 @@ from .const import (
|
||||
from .device import KNXInterfaceDevice
|
||||
from .expose import KNXExposeSensor, KNXExposeTime
|
||||
from .project import KNXProject
|
||||
from .repairs import data_secure_group_key_issue_dispatcher
|
||||
from .storage.config_store import KNXConfigStore
|
||||
from .telegrams import Telegrams
|
||||
|
||||
@@ -107,8 +108,12 @@ class KNXModule:
|
||||
|
||||
self._address_filter_transcoder: dict[AddressFilter, type[DPTBase]] = {}
|
||||
self.group_address_transcoder: dict[DeviceGroupAddress, type[DPTBase]] = {}
|
||||
self.group_address_entities: dict[
|
||||
DeviceGroupAddress, set[tuple[str, str]] # {(platform, unique_id),}
|
||||
] = {}
|
||||
self.knx_event_callback: TelegramQueue.Callback = self.register_event_callback()
|
||||
|
||||
self.entry.async_on_unload(data_secure_group_key_issue_dispatcher(self))
|
||||
self.entry.async_on_unload(
|
||||
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.stop)
|
||||
)
|
||||
@@ -225,6 +230,29 @@ class KNXModule:
|
||||
threaded=True,
|
||||
)
|
||||
|
||||
def add_to_group_address_entities(
|
||||
self,
|
||||
group_addresses: set[DeviceGroupAddress],
|
||||
identifier: tuple[str, str], # (platform, unique_id)
|
||||
) -> None:
|
||||
"""Register entity in group_address_entities map."""
|
||||
for ga in group_addresses:
|
||||
if ga not in self.group_address_entities:
|
||||
self.group_address_entities[ga] = set()
|
||||
self.group_address_entities[ga].add(identifier)
|
||||
|
||||
def remove_from_group_address_entities(
|
||||
self,
|
||||
group_addresses: set[DeviceGroupAddress],
|
||||
identifier: tuple[str, str],
|
||||
) -> None:
|
||||
"""Unregister entity from group_address_entities map."""
|
||||
for ga in group_addresses:
|
||||
if ga in self.group_address_entities:
|
||||
self.group_address_entities[ga].discard(identifier)
|
||||
if not self.group_address_entities[ga]:
|
||||
del self.group_address_entities[ga]
|
||||
|
||||
def connection_state_changed_cb(self, state: XknxConnectionState) -> None:
|
||||
"""Call invoked after a KNX connection state change was received."""
|
||||
self.connected = state == XknxConnectionState.CONNECTED
|
||||
|
||||
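A brief, hedged sketch of how this reverse map is queried, following the DataSecure repair handler shown later in this diff. The knx_module and telegram arguments stand in for the objects available in that dispatcher; only configured group addresses yield entity identifiers.

# Illustrative lookup against the group_address_entities map registered above.
def entities_for_telegram(knx_module, telegram) -> set[tuple[str, str]]:
    """Return {(platform, unique_id), ...} for the telegram's destination GA."""
    return knx_module.group_address_entities.get(telegram.destination_address, set())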
@@ -9,11 +9,11 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["xknx", "xknxproject"],
|
||||
"quality_scale": "silver",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"xknx==3.12.0",
|
||||
"xknx==3.13.0",
|
||||
"xknxproject==3.8.2",
|
||||
"knx-frontend==2025.10.31.195356"
|
||||
"knx-frontend==2025.12.19.150946"
|
||||
],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -105,7 +105,7 @@ rules:
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues: todo
|
||||
repair-issues: done
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
|
||||
homeassistant/components/knx/repairs.py (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
"""Repairs for KNX integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
import voluptuous as vol
|
||||
from xknx.exceptions.exception import InvalidSecureConfiguration
|
||||
from xknx.telegram import GroupAddress, IndividualAddress, Telegram
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import issue_registry as ir, selector
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .knx_module import KNXModule
|
||||
|
||||
from .const import (
|
||||
CONF_KNX_KNXKEY_PASSWORD,
|
||||
DOMAIN,
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
KNXConfigEntryData,
|
||||
)
|
||||
from .storage.keyring import DEFAULT_KNX_KEYRING_FILENAME, save_uploaded_knxkeys_file
|
||||
from .telegrams import SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM, TelegramDict
|
||||
|
||||
CONF_KEYRING_FILE: Final = "knxkeys_file"
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str | int | float | None] | None,
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
if issue_id == REPAIR_ISSUE_DATA_SECURE_GROUP_KEY:
|
||||
return DataSecureGroupIssueRepairFlow()
|
||||
# If KNX adds confirm-only repairs in the future, this should be changed
|
||||
# to return a ConfirmRepairFlow instead of raising a ValueError
|
||||
raise ValueError(f"unknown repair {issue_id}")
|
||||
|
||||
|
||||
######################
|
||||
# DataSecure key issue
|
||||
######################
|
||||
|
||||
|
||||
@callback
|
||||
def data_secure_group_key_issue_dispatcher(knx_module: KNXModule) -> Callable[[], None]:
|
||||
"""Watcher for DataSecure group key issues."""
|
||||
return async_dispatcher_connect(
|
||||
knx_module.hass,
|
||||
signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
|
||||
target=partial(_data_secure_group_key_issue_handler, knx_module),
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def _data_secure_group_key_issue_handler(
|
||||
knx_module: KNXModule, telegram: Telegram, telegram_dict: TelegramDict
|
||||
) -> None:
|
||||
"""Handle DataSecure group key issue telegrams."""
|
||||
if telegram.destination_address not in knx_module.group_address_entities:
|
||||
# Only report issues for configured group addresses
|
||||
return
|
||||
|
||||
issue_registry = ir.async_get(knx_module.hass)
|
||||
new_ga = str(telegram.destination_address)
|
||||
new_ia = str(telegram.source_address)
|
||||
new_data = {new_ga: new_ia}
|
||||
|
||||
if existing_issue := issue_registry.async_get_issue(
|
||||
DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY
|
||||
):
|
||||
assert isinstance(existing_issue.data, dict)
|
||||
existing_data: dict[str, str] = existing_issue.data # type: ignore[assignment]
|
||||
if new_ga in existing_data:
|
||||
current_ias = existing_data[new_ga].split(", ")
|
||||
if new_ia in current_ias:
|
||||
return
|
||||
current_ias = sorted([*current_ias, new_ia], key=IndividualAddress)
|
||||
new_data[new_ga] = ", ".join(current_ias)
|
||||
new_data_unsorted = existing_data | new_data
|
||||
new_data = {
|
||||
key: new_data_unsorted[key]
|
||||
for key in sorted(new_data_unsorted, key=GroupAddress)
|
||||
}
|
||||
|
||||
issue_registry.async_get_or_create(
|
||||
DOMAIN,
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
data=new_data, # type: ignore[arg-type]
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key=REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
translation_placeholders={
|
||||
"addresses": "\n".join(
|
||||
f"`{ga}` from {ias}" for ga, ias in new_data.items()
|
||||
),
|
||||
"interface": str(knx_module.xknx.current_address),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class DataSecureGroupIssueRepairFlow(RepairsFlow):
|
||||
"""Handler for an issue fixing flow for outdated DataSecure keys."""
|
||||
|
||||
@callback
|
||||
def _async_get_placeholders(self) -> dict[str, str]:
|
||||
issue_registry = ir.async_get(self.hass)
|
||||
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
|
||||
assert issue is not None
|
||||
return issue.translation_placeholders or {}
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the first step of a fix flow."""
|
||||
return await self.async_step_secure_knxkeys()
|
||||
|
||||
async def async_step_secure_knxkeys(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Manage upload of new KNX Keyring file."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
password = user_input[CONF_KNX_KNXKEY_PASSWORD]
|
||||
keyring = None
|
||||
try:
|
||||
keyring = await save_uploaded_knxkeys_file(
|
||||
self.hass,
|
||||
uploaded_file_id=user_input[CONF_KEYRING_FILE],
|
||||
password=password,
|
||||
)
|
||||
except InvalidSecureConfiguration:
|
||||
errors[CONF_KNX_KNXKEY_PASSWORD] = "keyfile_invalid_signature"
|
||||
|
||||
if not errors and keyring:
|
||||
new_entry_data = KNXConfigEntryData(
|
||||
knxkeys_filename=f"{DOMAIN}/{DEFAULT_KNX_KEYRING_FILENAME}",
|
||||
knxkeys_password=password,
|
||||
)
|
||||
return self.finish_flow(new_entry_data)
|
||||
|
||||
fields = {
|
||||
vol.Required(CONF_KEYRING_FILE): selector.FileSelector(
|
||||
config=selector.FileSelectorConfig(accept=".knxkeys")
|
||||
),
|
||||
vol.Required(CONF_KNX_KNXKEY_PASSWORD): selector.TextSelector(),
|
||||
}
|
||||
return self.async_show_form(
|
||||
step_id="secure_knxkeys",
|
||||
data_schema=vol.Schema(fields),
|
||||
description_placeholders=self._async_get_placeholders(),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@callback
|
||||
def finish_flow(
|
||||
self, new_entry_data: KNXConfigEntryData
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Finish the repair flow. Reload the config entry."""
|
||||
knx_config_entries = self.hass.config_entries.async_entries(DOMAIN)
|
||||
if knx_config_entries:
|
||||
config_entry = knx_config_entries[0] # single_config_entry
|
||||
new_data = {**config_entry.data, **new_entry_data}
|
||||
self.hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
|
||||
return self.async_create_entry(data={})
|
||||
@@ -59,6 +59,7 @@ from .const import (
|
||||
ClimateConf,
|
||||
ColorTempModes,
|
||||
CoverConf,
|
||||
FanConf,
|
||||
FanZeroMode,
|
||||
)
|
||||
from .validation import (
|
||||
@@ -575,20 +576,40 @@ class FanSchema(KNXPlatformSchema):
|
||||
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
|
||||
CONF_OSCILLATION_ADDRESS = "oscillation_address"
|
||||
CONF_OSCILLATION_STATE_ADDRESS = "oscillation_state_address"
|
||||
CONF_MAX_STEP = "max_step"
|
||||
CONF_SWITCH_ADDRESS = "switch_address"
|
||||
CONF_SWITCH_STATE_ADDRESS = "switch_state_address"
|
||||
|
||||
DEFAULT_NAME = "KNX Fan"
|
||||
|
||||
ENTITY_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Required(KNX_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_MAX_STEP): cv.byte,
|
||||
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
|
||||
}
|
||||
ENTITY_SCHEMA = vol.All(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(KNX_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_SWITCH_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_SWITCH_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(FanConf.MAX_STEP): cv.byte,
|
||||
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
|
||||
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
|
||||
}
|
||||
),
|
||||
vol.Any(
|
||||
vol.Schema(
|
||||
{vol.Required(KNX_ADDRESS): object},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
),
|
||||
vol.Schema(
|
||||
{vol.Required(CONF_SWITCH_ADDRESS): object},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
),
|
||||
msg=(
|
||||
f"At least one of '{KNX_ADDRESS}' or"
|
||||
f" '{CONF_SWITCH_ADDRESS}' is required."
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
||||
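The vol.All(..., vol.Any(...)) construct above enforces that at least one of the two group addresses is present. Below is a standalone, hedged sketch of the same voluptuous pattern with generic key names; it is not the KNX schema itself, only a minimal demonstration of the validation technique.

# Minimal demonstration of the "at least one of two keys" validation pattern.
import voluptuous as vol

schema = vol.All(
    vol.Schema(
        {
            vol.Optional("address"): str,
            vol.Optional("switch_address"): str,
        }
    ),
    vol.Any(
        vol.Schema({vol.Required("address"): object}, extra=vol.ALLOW_EXTRA),
        vol.Schema({vol.Required("switch_address"): object}, extra=vol.ALLOW_EXTRA),
        msg="At least one of 'address' or 'switch_address' is required.",
    ),
)

schema({"address": "1/2/3"})         # valid
schema({"switch_address": "1/2/4"})  # valid
# schema({}) raises a voluptuous Invalid error with the message above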
@@ -6,8 +6,8 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.core.connection_state import XknxConnectionState, XknxConnectionType
|
||||
from xknx.devices import Device as XknxDevice, Sensor as XknxSensor
|
||||
|
||||
@@ -25,20 +25,32 @@ from homeassistant.const import (
|
||||
CONF_ENTITY_CATEGORY,
|
||||
CONF_NAME,
|
||||
CONF_TYPE,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
EntityCategory,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, StateType
|
||||
from homeassistant.util.enum import try_parse_enum
|
||||
|
||||
from .const import ATTR_SOURCE, KNX_MODULE_KEY
|
||||
from .entity import KnxYamlEntity
|
||||
from .const import ATTR_SOURCE, CONF_SYNC_STATE, DOMAIN, KNX_MODULE_KEY
|
||||
from .dpt import get_supported_dpts
|
||||
from .entity import (
|
||||
KnxUiEntity,
|
||||
KnxUiEntityPlatformController,
|
||||
KnxYamlEntity,
|
||||
_KnxEntityBase,
|
||||
)
|
||||
from .knx_module import KNXModule
|
||||
from .schema import SensorSchema
|
||||
from .storage.const import CONF_ALWAYS_CALLBACK, CONF_ENTITY, CONF_GA_SENSOR
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
@@ -122,58 +134,41 @@ async def async_setup_entry(
|
||||
config_entry: config_entries.ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up sensor(s) for KNX platform."""
|
||||
"""Set up entities for KNX platform."""
|
||||
knx_module = hass.data[KNX_MODULE_KEY]
|
||||
platform = async_get_current_platform()
|
||||
knx_module.config_store.add_platform(
|
||||
platform=Platform.SENSOR,
|
||||
controller=KnxUiEntityPlatformController(
|
||||
knx_module=knx_module,
|
||||
entity_platform=platform,
|
||||
entity_class=KnxUiSensor,
|
||||
),
|
||||
)
|
||||
|
||||
entities: list[SensorEntity] = []
|
||||
entities.extend(
|
||||
KNXSystemSensor(knx_module, description)
|
||||
for description in SYSTEM_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
config: list[ConfigType] | None = knx_module.config_yaml.get(Platform.SENSOR)
|
||||
if config:
|
||||
if yaml_platform_config := knx_module.config_yaml.get(Platform.SENSOR):
|
||||
entities.extend(
|
||||
KNXSensor(knx_module, entity_config) for entity_config in config
|
||||
KnxYamlSensor(knx_module, entity_config)
|
||||
for entity_config in yaml_platform_config
|
||||
)
|
||||
if ui_config := knx_module.config_store.data["entities"].get(Platform.SENSOR):
|
||||
entities.extend(
|
||||
KnxUiSensor(knx_module, unique_id, config)
|
||||
for unique_id, config in ui_config.items()
|
||||
)
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor:
|
||||
"""Return a KNX sensor to be used within XKNX."""
|
||||
return XknxSensor(
|
||||
xknx,
|
||||
name=config[CONF_NAME],
|
||||
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
|
||||
sync_state=config[SensorSchema.CONF_SYNC_STATE],
|
||||
always_callback=True,
|
||||
value_type=config[CONF_TYPE],
|
||||
)
|
||||
|
||||
|
||||
class KNXSensor(KnxYamlEntity, RestoreSensor):
|
||||
class _KnxSensor(RestoreSensor, _KnxEntityBase):
|
||||
"""Representation of a KNX sensor."""
|
||||
|
||||
_device: XknxSensor
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize of a KNX sensor."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=_create_sensor(knx_module.xknx, config),
|
||||
)
|
||||
if device_class := config.get(CONF_DEVICE_CLASS):
|
||||
self._attr_device_class = device_class
|
||||
else:
|
||||
self._attr_device_class = try_parse_enum(
|
||||
SensorDeviceClass, self._device.ha_device_class()
|
||||
)
|
||||
|
||||
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
|
||||
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
|
||||
self._attr_state_class = config.get(CONF_STATE_CLASS)
|
||||
self._attr_extra_state_attributes = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
if (
|
||||
@@ -198,6 +193,89 @@ class KNXSensor(KnxYamlEntity, RestoreSensor):
|
||||
super().after_update_callback(device)
|
||||
|
||||
|
||||
class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
|
||||
"""Representation of a KNX sensor configured from YAML."""
|
||||
|
||||
_device: XknxSensor
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize of a KNX sensor."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxSensor(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
always_callback=True,
|
||||
value_type=config[CONF_TYPE],
|
||||
),
|
||||
)
|
||||
if device_class := config.get(CONF_DEVICE_CLASS):
|
||||
self._attr_device_class = device_class
|
||||
else:
|
||||
self._attr_device_class = try_parse_enum(
|
||||
SensorDeviceClass, self._device.ha_device_class()
|
||||
)
|
||||
|
||||
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
|
||||
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
|
||||
self._attr_state_class = config.get(CONF_STATE_CLASS)
|
||||
self._attr_extra_state_attributes = {}
|
||||
|
||||
|
||||
class KnxUiSensor(_KnxSensor, KnxUiEntity):
|
||||
"""Representation of a KNX sensor configured from the UI."""
|
||||
|
||||
_device: XknxSensor
|
||||
|
||||
def __init__(
|
||||
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize KNX sensor."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
dpt_string = knx_conf.get_dpt(CONF_GA_SENSOR)
|
||||
assert dpt_string is not None # required for sensor
|
||||
dpt_info = get_supported_dpts()[dpt_string]
|
||||
|
||||
self._device = XknxSensor(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_SENSOR),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
always_callback=True,
|
||||
value_type=dpt_string,
|
||||
)
|
||||
|
||||
if device_class_override := knx_conf.get(CONF_DEVICE_CLASS):
|
||||
self._attr_device_class = try_parse_enum(
|
||||
SensorDeviceClass, device_class_override
|
||||
)
|
||||
else:
|
||||
self._attr_device_class = dpt_info["sensor_device_class"]
|
||||
|
||||
if state_class_override := knx_conf.get(CONF_STATE_CLASS):
|
||||
self._attr_state_class = try_parse_enum(
|
||||
SensorStateClass, state_class_override
|
||||
)
|
||||
else:
|
||||
self._attr_state_class = dpt_info["sensor_state_class"]
|
||||
|
||||
self._attr_native_unit_of_measurement = (
|
||||
knx_conf.get(CONF_UNIT_OF_MEASUREMENT) or dpt_info["unit"]
|
||||
)
|
||||
|
||||
self._attr_force_update = knx_conf.get(CONF_ALWAYS_CALLBACK, default=False)
|
||||
self._attr_extra_state_attributes = {}
|
||||
|
||||
|
||||
class KNXSystemSensor(SensorEntity):
|
||||
"""Representation of a KNX system sensor."""
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.