Mirror of https://github.com/home-assistant/core.git (synced 2025-12-25 09:18:27 +00:00)

Compare commits: window_cov...dev (86 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2d6ae8f907 | |
| | 2683b893c4 | |
| | 2b5de0db01 | |
| | 83d4f8eedc | |
| | 89247e9069 | |
| | 5f996e700f | |
| | 8f44eb6652 | |
| | 31dadd3102 | |
| | 4c74a57b63 | |
| | 750744f332 | |
| | 03442c5e51 | |
| | 26774c20c7 | |
| | 29201ac5d6 | |
| | a6938127ea | |
| | 65d7f22072 | |
| | 0730c707e9 | |
| | 2e3eb0f9af | |
| | 2b5823c264 | |
| | 95165022db | |
| | 7c71c0377f | |
| | b07b699e79 | |
| | 34db548725 | |
| | 5150efd63f | |
| | 0525c75686 | |
| | 7c14862f62 | |
| | 19f8d9d41b | |
| | af1218876c | |
| | 9715a7cc32 | |
| | b87b72ab01 | |
| | 0f3f16fabe | |
| | 85311e3def | |
| | a33a4b6d9d | |
| | 02f412feb1 | |
| | b3c78d4207 | |
| | a3dec29c72 | |
| | aa20a74a76 | |
| | c0fa6ad2e0 | |
| | 5107b7012d | |
| | bcc5985c8b | |
| | 5933c09a1d | |
| | 5f1e6f3633 | |
| | 6bd8d123ed | |
| | 50a51b5ecc | |
| | c115b418ac | |
| | 2160827a50 | |
| | 82d84d7adf | |
| | 3e498d289b | |
| | e6d8092c37 | |
| | 2e4f95c099 | |
| | 9f54b09423 | |
| | 8361d65d23 | |
| | 7a82aa4803 | |
| | 02ab11c1bd | |
| | 64f0a615df | |
| | 3e889616f2 | |
| | bdbe2a6346 | |
| | 016d492342 | |
| | 9ce46c0937 | |
| | 8d96aee96e | |
| | 7083a0fdb7 | |
| | e3976923b2 | |
| | 0b20417895 | |
| | ed46c30b10 | |
| | 38f4cf0575 | |
| | 7b60cc3a80 | |
| | fe0c92b6c5 | |
| | c4386b4360 | |
| | d4d26bccc1 | |
| | 550b7bf7ba | |
| | 6ff472ff87 | |
| | ca30d8b1c2 | |
| | aae98a77d5 | |
| | 30b7b24ddd | |
| | a972a6d43a | |
| | 6e06c015df | |
| | 01c3e88e0f | |
| | fd9064376a | |
| | 9eb5d452cf | |
| | 966209e4b6 | |
| | a09ac94db9 | |
| | 0710cf3e6b | |
| | a81f2a63c0 | |
| | 6ef2d0d0a3 | |
| | 911ea67a6d | |
| | 28dc32d5dc | |
| | c95416cb48 | |
4  .github/workflows/builder.yml  vendored
@@ -197,7 +197,7 @@ jobs:
cosign-release: "v2.5.3"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

- name: Build variables
id: vars
@@ -405,7 +405,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.7.1

- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
1  .gitignore  vendored
@@ -92,6 +92,7 @@ pip-selfcheck.json
venv
.venv
Pipfile*
uv.lock
share/*
/Scripts/
6  CODEOWNERS  generated
@@ -530,6 +530,8 @@ build.json @home-assistant/supervisor
/tests/components/flo/ @dmulcahey
/homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor
/tests/components/flume/ @ChrisMandich @bdraco @jeeftor
/homeassistant/components/fluss/ @fluss
/tests/components/fluss/ @fluss
/homeassistant/components/flux_led/ @icemanch
/tests/components/flux_led/ @icemanch
/homeassistant/components/forecast_solar/ @klaasnicolaas @frenck
@@ -1695,8 +1697,8 @@ build.json @home-assistant/supervisor
/tests/components/trafikverket_train/ @gjohansson-ST
/homeassistant/components/trafikverket_weatherstation/ @gjohansson-ST
/tests/components/trafikverket_weatherstation/ @gjohansson-ST
/homeassistant/components/transmission/ @engrbm87 @JPHutchins
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/tests/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant

from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:
9  homeassistant/components/airobot/icons.json  Normal file
@@ -0,0 +1,9 @@
{
  "entity": {
    "number": {
      "hysteresis_band": {
        "default": "mdi:delta"
      }
    }
  }
}
99  homeassistant/components/airobot/number.py  Normal file
@@ -0,0 +1,99 @@
"""Number platform for Airobot thermostat."""

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass

from pyairobotrest.const import HYSTERESIS_BAND_MAX, HYSTERESIS_BAND_MIN
from pyairobotrest.exceptions import AirobotError

from homeassistant.components.number import (
    NumberDeviceClass,
    NumberEntity,
    NumberEntityDescription,
)
from homeassistant.const import EntityCategory, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AirobotConfigEntry
from .const import DOMAIN
from .coordinator import AirobotDataUpdateCoordinator
from .entity import AirobotEntity

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class AirobotNumberEntityDescription(NumberEntityDescription):
    """Describes Airobot number entity."""

    value_fn: Callable[[AirobotDataUpdateCoordinator], float]
    set_value_fn: Callable[[AirobotDataUpdateCoordinator, float], Awaitable[None]]


NUMBERS: tuple[AirobotNumberEntityDescription, ...] = (
    AirobotNumberEntityDescription(
        key="hysteresis_band",
        translation_key="hysteresis_band",
        device_class=NumberDeviceClass.TEMPERATURE,
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        native_min_value=HYSTERESIS_BAND_MIN / 10.0,
        native_max_value=HYSTERESIS_BAND_MAX / 10.0,
        native_step=0.1,
        value_fn=lambda coordinator: coordinator.data.settings.hysteresis_band,
        set_value_fn=lambda coordinator, value: coordinator.client.set_hysteresis_band(
            value
        ),
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AirobotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Airobot number platform."""
    coordinator = entry.runtime_data
    async_add_entities(
        AirobotNumber(coordinator, description) for description in NUMBERS
    )


class AirobotNumber(AirobotEntity, NumberEntity):
    """Representation of an Airobot number entity."""

    entity_description: AirobotNumberEntityDescription

    def __init__(
        self,
        coordinator: AirobotDataUpdateCoordinator,
        description: AirobotNumberEntityDescription,
    ) -> None:
        """Initialize the number entity."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"

    @property
    def native_value(self) -> float:
        """Return the current value."""
        return self.entity_description.value_fn(self.coordinator)

    async def async_set_native_value(self, value: float) -> None:
        """Set the value."""
        try:
            await self.entity_description.set_value_fn(self.coordinator, value)
        except AirobotError as err:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="set_value_failed",
                translation_placeholders={"error": str(err)},
            ) from err
        else:
            await self.coordinator.async_request_refresh()
@@ -48,7 +48,7 @@ rules:
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Single device integration, no dynamic device discovery needed.
@@ -57,7 +57,7 @@ rules:
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
@@ -44,6 +44,11 @@
}
},
"entity": {
"number": {
"hysteresis_band": {
"name": "Hysteresis band"
}
},
"sensor": {
"air_temperature": {
"name": "Air temperature"
@@ -74,6 +79,9 @@
},
"set_temperature_failed": {
"message": "Failed to set temperature to {temperature}."
},
"set_value_failed": {
"message": "Failed to set value: {error}"
}
}
}
@@ -3,7 +3,7 @@
from __future__ import annotations

import logging
from typing import Any
from typing import TYPE_CHECKING, Any

from aiohttp import CookieJar
from pyanglianwater import AnglianWater
@@ -30,14 +30,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
)


async def validate_credentials(
auth: MSOB2CAuth, account_number: str
) -> str | MSOB2CAuth:
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
"""Validate the provided credentials."""
try:
await auth.send_login_request()
@@ -46,6 +43,33 @@ async def validate_credentials(
except Exception:
_LOGGER.exception("Unexpected exception")
return "unknown"
return auth


def humanize_account_data(account: dict) -> str:
"""Convert an account data into a human-readable format."""
if account["address"]["company_name"] != "":
return f"{account['account_number']} - {account['address']['company_name']}"
if account["address"]["building_name"] != "":
return f"{account['account_number']} - {account['address']['building_name']}"
return f"{account['account_number']} - {account['address']['postcode']}"


async def get_accounts(auth: MSOB2CAuth) -> list[selector.SelectOptionDict]:
"""Retrieve the list of accounts associated with the authenticated user."""
_aw = AnglianWater(authenticator=auth)
accounts = await _aw.api.get_associated_accounts()
return [
selector.SelectOptionDict(
value=str(account["account_number"]),
label=humanize_account_data(account),
)
for account in accounts["result"]["active"]
]


async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2CAuth:
"""Validate the provided account number."""
_aw = AnglianWater(authenticator=auth)
try:
await _aw.validate_smart_meter(account_number)
@@ -57,36 +81,91 @@ async def validate_credentials(
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anglian Water."""

def __init__(self) -> None:
"""Initialize the config flow."""
self.authenticator: MSOB2CAuth | None = None
self.accounts: list[selector.SelectOptionDict] = []
self.user_input: dict[str, Any] | None = None

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
validation_response = await validate_credentials(
MSOB2CAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_create_clientsession(
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
self.authenticator = MSOB2CAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_create_clientsession(
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
user_input[CONF_ACCOUNT_NUMBER],
)
validation_response = await validate_credentials(self.authenticator)
if isinstance(validation_response, str):
errors["base"] = validation_response
else:
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_ACCOUNT_NUMBER],
data={
**user_input,
CONF_ACCESS_TOKEN: validation_response.refresh_token,
},
self.accounts = await get_accounts(self.authenticator)
if len(self.accounts) > 1:
self.user_input = user_input
return await self.async_step_select_account()
account_number = self.accounts[0]["value"]
self.user_input = user_input
return await self.async_step_complete(
{
CONF_ACCOUNT_NUMBER: account_number,
}
)

return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

async def async_step_select_account(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the account selection step."""
errors = {}
if user_input is not None:
if TYPE_CHECKING:
assert self.authenticator
validation_result = await validate_account(
self.authenticator,
user_input[CONF_ACCOUNT_NUMBER],
)
if isinstance(validation_result, str):
errors["base"] = validation_result
else:
return await self.async_step_complete(user_input)
return self.async_show_form(
step_id="select_account",
data_schema=vol.Schema(
{
vol.Required(CONF_ACCOUNT_NUMBER): selector.SelectSelector(
selector.SelectSelectorConfig(
options=self.accounts,
multiple=False,
mode=selector.SelectSelectorMode.DROPDOWN,
)
)
}
),
errors=errors,
)

async def async_step_complete(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Handle the final configuration step."""
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
if TYPE_CHECKING:
assert self.authenticator
assert self.user_input
config_entry_data = {
**self.user_input,
CONF_ACCOUNT_NUMBER: user_input[CONF_ACCOUNT_NUMBER],
CONF_ACCESS_TOKEN: self.authenticator.refresh_token,
}
return self.async_create_entry(
title=user_input[CONF_ACCOUNT_NUMBER],
data=config_entry_data,
)
@@ -10,14 +10,21 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"select_account": {
"data": {
"account_number": "Billing account number"
},
"data_description": {
"account_number": "Select the billing account you wish to use."
},
"description": "Multiple active billing accounts were found with your credentials. Please select the account you wish to use. If this is unexpected, contact Anglian Water to confirm your active accounts."
},
"user": {
"data": {
"account_number": "Billing Account Number",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"account_number": "Your account number found on your latest bill.",
"password": "Your password",
"username": "Username or email used to log in to the Anglian Water website."
},
@@ -136,6 +136,7 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
"light",
"lock",
"media_player",
"scene",
"siren",
"switch",
"text",
@@ -29,7 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"requirements": ["axis==65"],
"requirements": ["axis==66"],
"ssdp": [
{
"manufacturer": "AXIS"
@@ -25,6 +25,7 @@ from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
entity_platform,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.device_registry import (
@@ -42,7 +43,12 @@ from homeassistant.util import dt as dt_util, slugify

from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
from .coordinator import BluesoundCoordinator
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
from .utils import (
dispatcher_join_signal,
dispatcher_unjoin_signal,
format_unique_id,
id_to_paired_player,
)

if TYPE_CHECKING:
from . import BluesoundConfigEntry
@@ -83,9 +89,11 @@ async def async_setup_entry(
SERVICE_CLEAR_TIMER, None, "async_clear_timer"
)
platform.async_register_entity_service(
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_bluesound_join"
)
platform.async_register_entity_service(
SERVICE_UNJOIN, None, "async_bluesound_unjoin"
)
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")

async_add_entities([bluesound_player], update_before_add=True)

@@ -120,6 +128,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self._presets: list[Preset] = coordinator.data.presets
self._group_name: str | None = None
self._group_list: list[str] = []
self._group_members: list[str] | None = None
self._bluesound_device_name = sync_status.name
self._player = player
self._last_status_update = dt_util.utcnow()
@@ -180,6 +189,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self._last_status_update = dt_util.utcnow()

self._group_list = self.rebuild_bluesound_group()
self._group_members = self.rebuild_group_members()

self.async_write_ha_state()

@@ -365,11 +375,13 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
MediaPlayerEntityFeature.VOLUME_STEP
| MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.GROUPING
)

supported = (
MediaPlayerEntityFeature.CLEAR_PLAYLIST
| MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.GROUPING
)

if not self._status.indexing:
@@ -421,8 +433,57 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity

return shuffle

async def async_join(self, master: str) -> None:
@property
def group_members(self) -> list[str] | None:
"""Get list of group members. Leader is always first."""
return self._group_members

async def async_join_players(self, group_members: list[str]) -> None:
"""Join `group_members` as a player group with the current player."""
if self.entity_id in group_members:
raise ServiceValidationError("Cannot join player to itself")

entity_ids_with_sync_status = self._entity_ids_with_sync_status()

paired_players = []
for group_member in group_members:
sync_status = entity_ids_with_sync_status.get(group_member)
if sync_status is None:
continue
paired_player = id_to_paired_player(sync_status.id)
if paired_player:
paired_players.append(paired_player)

if paired_players:
await self._player.add_followers(paired_players)

async def async_unjoin_player(self) -> None:
"""Remove this player from any group."""
if self._sync_status.leader is not None:
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
async_dispatcher_send(
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
)

if self._sync_status.followers is not None:
await self._player.remove_follower(self.host, self.port)

async def async_bluesound_join(self, master: str) -> None:
"""Join the player to a group."""
ir.async_create_issue(
self.hass,
DOMAIN,
f"deprecated_service_{SERVICE_JOIN}",
is_fixable=False,
breaks_in_ha_version="2026.7.0",
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_service_join",
translation_placeholders={
"name": slugify(self.sync_status.name),
},
)

if master == self.entity_id:
raise ServiceValidationError("Cannot join player to itself")

@@ -431,18 +492,24 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self.hass, dispatcher_join_signal(master), self.host, self.port
)

async def async_unjoin(self) -> None:
async def async_bluesound_unjoin(self) -> None:
"""Unjoin the player from a group."""
if self._sync_status.leader is None:
return

leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"

_LOGGER.debug("Trying to unjoin player: %s", self.id)
async_dispatcher_send(
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
ir.async_create_issue(
self.hass,
DOMAIN,
f"deprecated_service_{SERVICE_UNJOIN}",
is_fixable=False,
breaks_in_ha_version="2026.7.0",
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_service_unjoin",
translation_placeholders={
"name": slugify(self.sync_status.name),
},
)

await self.async_unjoin_player()

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""List members in group."""
@@ -488,6 +555,63 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
follower_names.insert(0, leader_sync_status.name)
return follower_names

def rebuild_group_members(self) -> list[str] | None:
"""Get list of group members. Leader is always first."""
if self.sync_status.leader is None and self.sync_status.followers is None:
return None

entity_ids_with_sync_status = self._entity_ids_with_sync_status()

leader_entity_id = None
followers = None
if self.sync_status.followers is not None:
leader_entity_id = self.entity_id
followers = self.sync_status.followers
elif self.sync_status.leader is not None:
leader_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
for entity_id, sync_status in entity_ids_with_sync_status.items():
if sync_status.id == leader_id:
leader_entity_id = entity_id
followers = sync_status.followers
break

if leader_entity_id is None or followers is None:
return None

grouped_entity_ids = [leader_entity_id]
for follower in followers:
follower_id = f"{follower.ip}:{follower.port}"
entity_ids = [
entity_id
for entity_id, sync_status in entity_ids_with_sync_status.items()
if sync_status.id == follower_id
]
match entity_ids:
case [entity_id]:
grouped_entity_ids.append(entity_id)

return grouped_entity_ids

def _entity_ids_with_sync_status(self) -> dict[str, SyncStatus]:
result = {}

entity_registry = er.async_get(self.hass)

config_entries: list[BluesoundConfigEntry] = (
self.hass.config_entries.async_entries(DOMAIN)
)
for config_entry in config_entries:
entity_entries = er.async_entries_for_config_entry(
entity_registry, config_entry.entry_id
)
for entity_entry in entity_entries:
if entity_entry.domain == "media_player":
result[entity_entry.entity_id] = (
config_entry.runtime_data.coordinator.data.sync_status
)

return result

async def async_add_follower(self, host: str, port: int) -> None:
"""Add follower to leader."""
await self._player.add_follower(host, port)
@@ -41,9 +41,17 @@
"description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.clear_sleep_timer"
},
"deprecated_service_join": {
"description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.join"
},
"deprecated_service_set_sleep_timer": {
"description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.set_sleep_timer"
},
"deprecated_service_unjoin": {
"description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.unjoin"
}
},
"services": {
@@ -1,5 +1,7 @@
"""Utility functions for the Bluesound component."""

from pyblu import PairedPlayer

from homeassistant.helpers.device_registry import format_mac

@@ -19,3 +21,12 @@ def dispatcher_unjoin_signal(leader_id: str) -> str:
Id is ip_address:port. This can be obtained from sync_status.id.
"""
return f"bluesound_unjoin_{leader_id}"


def id_to_paired_player(id: str) -> PairedPlayer | None:
"""Try to convert id in format 'ip:port' to PairedPlayer. Returns None if unable to do so."""
match id.rsplit(":", 1):
case [str() as ip, str() as port] if port.isdigit():
return PairedPlayer(ip, int(port))
case _:
return None
@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==3.15.0"]
"requirements": ["bthome-ble==3.17.0"]
}
@@ -98,6 +98,18 @@
}
},
"triggers": {
"current_humidity_changed": {
"trigger": "mdi:water-percent"
},
"current_humidity_crossed_threshold": {
"trigger": "mdi:water-percent"
},
"current_temperature_changed": {
"trigger": "mdi:thermometer"
},
"current_temperature_crossed_threshold": {
"trigger": "mdi:thermometer"
},
"hvac_mode_changed": {
"trigger": "mdi:thermostat"
},
@@ -110,6 +122,12 @@
"started_heating": {
"trigger": "mdi:fire"
},
"target_humidity_changed": {
"trigger": "mdi:water-percent"
},
"target_humidity_crossed_threshold": {
"trigger": "mdi:water-percent"
},
"target_temperature_changed": {
"trigger": "mdi:thermometer"
},
@@ -312,6 +312,78 @@
},
"title": "Climate",
"triggers": {
"current_humidity_changed": {
"description": "Triggers after the humidity measured by one or more climate-control devices changes.",
"fields": {
"above": {
"description": "Trigger when the humidity is above this value.",
"name": "Above"
},
"below": {
"description": "Trigger when the humidity is below this value.",
"name": "Below"
}
},
"name": "Climate-control device current humidity changed"
},
"current_humidity_crossed_threshold": {
"description": "Triggers after the humidity measured by one or more climate-control devices crosses a threshold.",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
},
"lower_limit": {
"description": "Lower threshold limit.",
"name": "Lower threshold"
},
"threshold_type": {
"description": "Type of threshold crossing to trigger on.",
"name": "Threshold type"
},
"upper_limit": {
"description": "Upper threshold limit.",
"name": "Upper threshold"
}
},
"name": "Climate-control device current humidity crossed threshold"
},
"current_temperature_changed": {
"description": "Triggers after the temperature measured by one or more climate-control devices changes.",
"fields": {
"above": {
"description": "Trigger when the temperature is above this value.",
"name": "Above"
},
"below": {
"description": "Trigger when the temperature is below this value.",
"name": "Below"
}
},
"name": "Climate-control device current temperature changed"
},
"current_temperature_crossed_threshold": {
"description": "Triggers after the temperature measured by one or more climate-control devices crosses a threshold.",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
},
"lower_limit": {
"description": "Lower threshold limit.",
"name": "Lower threshold"
},
"threshold_type": {
"description": "Type of threshold crossing to trigger on.",
"name": "Threshold type"
},
"upper_limit": {
"description": "Upper threshold limit.",
"name": "Upper threshold"
}
},
"name": "Climate-control device current temperature crossed threshold"
},
"hvac_mode_changed": {
"description": "Triggers after the mode of one or more climate-control devices changes.",
"fields": {
@@ -356,6 +428,42 @@
},
"name": "Climate-control device started heating"
},
"target_humidity_changed": {
"description": "Triggers after the humidity setpoint of one or more climate-control devices changes.",
"fields": {
"above": {
"description": "Trigger when the target humidity is above this value.",
"name": "Above"
},
"below": {
"description": "Trigger when the target humidity is below this value.",
"name": "Below"
}
},
"name": "Climate-control device target humidity changed"
},
"target_humidity_crossed_threshold": {
"description": "Triggers after the humidity setpoint of one or more climate-control devices crosses a threshold.",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
},
"lower_limit": {
"description": "Lower threshold limit.",
"name": "Lower threshold"
},
"threshold_type": {
"description": "Type of threshold crossing to trigger on.",
"name": "Threshold type"
},
"upper_limit": {
"description": "Upper threshold limit.",
"name": "Upper threshold"
}
},
"name": "Climate-control device target humidity crossed threshold"
},
"target_temperature_changed": {
"description": "Triggers after the temperature setpoint of one or more climate-control devices changes.",
"fields": {
@@ -17,7 +17,15 @@ from homeassistant.helpers.trigger import (
make_entity_transition_trigger,
)

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode
from .const import (
ATTR_CURRENT_HUMIDITY,
ATTR_CURRENT_TEMPERATURE,
ATTR_HUMIDITY,
ATTR_HVAC_ACTION,
DOMAIN,
HVACAction,
HVACMode,
)

CONF_HVAC_MODE = "hvac_mode"

@@ -45,6 +53,18 @@ class HVACModeChangedTrigger(EntityTargetStateTriggerBase):


TRIGGERS: dict[str, type[Trigger]] = {
"current_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_CURRENT_HUMIDITY
),
"current_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
DOMAIN, ATTR_CURRENT_HUMIDITY
),
"current_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_CURRENT_TEMPERATURE
),
"current_temperature_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
DOMAIN, ATTR_CURRENT_TEMPERATURE
),
"hvac_mode_changed": HVACModeChangedTrigger,
"started_cooling": make_entity_target_state_attribute_trigger(
DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
@@ -52,6 +72,12 @@ TRIGGERS: dict[str, type[Trigger]] = {
"started_drying": make_entity_target_state_attribute_trigger(
DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
),
"target_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_HUMIDITY
),
"target_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
DOMAIN, ATTR_HUMIDITY
),
"target_temperature_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_TEMPERATURE
),
@@ -65,6 +65,48 @@ hvac_mode_changed:
- unknown
multiple: true

current_humidity_changed:
target: *trigger_climate_target
fields:
above: *number_or_entity
below: *number_or_entity

current_humidity_crossed_threshold:
target: *trigger_climate_target
fields:
behavior: *trigger_behavior
threshold_type: *trigger_threshold_type
lower_limit: *number_or_entity
upper_limit: *number_or_entity

target_humidity_changed:
target: *trigger_climate_target
fields:
above: *number_or_entity
below: *number_or_entity

target_humidity_crossed_threshold:
target: *trigger_climate_target
fields:
behavior: *trigger_behavior
threshold_type: *trigger_threshold_type
lower_limit: *number_or_entity
upper_limit: *number_or_entity

current_temperature_changed:
target: *trigger_climate_target
fields:
above: *number_or_entity
below: *number_or_entity

current_temperature_crossed_threshold:
target: *trigger_climate_target
fields:
behavior: *trigger_behavior
threshold_type: *trigger_threshold_type
lower_limit: *number_or_entity
upper_limit: *number_or_entity

target_temperature_changed:
target: *trigger_climate_target
fields:
@@ -35,7 +35,7 @@
"cpu_overheating": "CPU overheating",
"none": "None",
"pellets": "Pellets",
"unkownn": "Unknown alarm"
"unknown": "Unknown alarm"
}
},
"convector_air_flow": {
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.3.0",
"aioesphomeapi==43.6.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],
@@ -8,8 +8,7 @@ import voluptuous as vol

from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir
from homeassistant.core import HomeAssistant

from .manager import async_replace_device

@@ -22,13 +21,6 @@ class ESPHomeRepair(RepairsFlow):
self._data = data
super().__init__()

@callback
def _async_get_placeholders(self) -> dict[str, str]:
issue_registry = ir.async_get(self.hass)
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
assert issue is not None
return issue.translation_placeholders or {}


class DeviceConflictRepair(ESPHomeRepair):
"""Handler for an issue fixing device conflict."""
@@ -58,7 +50,6 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_menu(
step_id="init",
menu_options=["migrate", "manual"],
description_placeholders=self._async_get_placeholders(),
)

async def async_step_migrate(
@@ -69,7 +60,6 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_form(
step_id="migrate",
data_schema=vol.Schema({}),
description_placeholders=self._async_get_placeholders(),
)
entry_id = self.entry_id
await async_replace_device(self.hass, entry_id, self.stored_mac, self.mac)
@@ -84,7 +74,6 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_form(
step_id="manual",
data_schema=vol.Schema({}),
description_placeholders=self._async_get_placeholders(),
)
self.hass.config_entries.async_schedule_reload(self.entry_id)
return self.async_create_entry(data={})
31  homeassistant/components/fluss/__init__.py  Normal file
@@ -0,0 +1,31 @@
"""The Fluss+ integration."""

from __future__ import annotations

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant

from .coordinator import FlussDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.BUTTON]


type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: FlussConfigEntry,
) -> bool:
    """Set up Fluss+ from a config entry."""
    coordinator = FlussDataUpdateCoordinator(hass, entry, entry.data[CONF_API_KEY])
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: FlussConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
40  homeassistant/components/fluss/button.py  Normal file
@@ -0,0 +1,40 @@
"""Support for Fluss Devices."""

from homeassistant.components.button import ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import FlussApiClientError, FlussDataUpdateCoordinator
from .entity import FlussEntity

type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]


async def async_setup_entry(
    hass: HomeAssistant,
    entry: FlussConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Fluss Devices, filtering out any invalid payloads."""
    coordinator = entry.runtime_data
    devices = coordinator.data

    async_add_entities(
        FlussButton(coordinator, device_id, device)
        for device_id, device in devices.items()
    )


class FlussButton(FlussEntity, ButtonEntity):
    """Representation of a Fluss button device."""

    _attr_name = None

    async def async_press(self) -> None:
        """Handle the button press."""
        try:
            await self.coordinator.api.async_trigger_device(self.device_id)
        except FlussApiClientError as err:
            raise HomeAssistantError(f"Failed to trigger device: {err}") from err
55  homeassistant/components/fluss/config_flow.py  Normal file
@@ -0,0 +1,55 @@
"""Config flow for Fluss+ integration."""

from __future__ import annotations

from typing import Any

from fluss_api import (
    FlussApiClient,
    FlussApiClientAuthenticationError,
    FlussApiClientCommunicationError,
)
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN, LOGGER

STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string})


class FlussConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Fluss+."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""

        errors: dict[str, str] = {}
        if user_input is not None:
            api_key = user_input[CONF_API_KEY]
            self._async_abort_entries_match({CONF_API_KEY: api_key})
            client = FlussApiClient(
                user_input[CONF_API_KEY], session=async_get_clientsession(self.hass)
            )
            try:
                await client.async_get_devices()
            except FlussApiClientCommunicationError:
                errors["base"] = "cannot_connect"
            except FlussApiClientAuthenticationError:
                errors["base"] = "invalid_auth"
            except Exception:  # noqa: BLE001
                LOGGER.exception("Unexpected exception occurred")
                errors["base"] = "unknown"
            if not errors:
                return self.async_create_entry(
                    title="My Fluss+ Devices", data=user_input
                )

        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
9  homeassistant/components/fluss/const.py  Normal file
@@ -0,0 +1,9 @@
"""Constants for the Fluss+ integration."""

from datetime import timedelta
import logging

DOMAIN = "fluss"
LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = 60  # seconds
UPDATE_INTERVAL_TIMEDELTA = timedelta(seconds=UPDATE_INTERVAL)
50  homeassistant/components/fluss/coordinator.py  Normal file
@@ -0,0 +1,50 @@
"""DataUpdateCoordinator for Fluss+ integration."""

from __future__ import annotations

from typing import Any

from fluss_api import (
    FlussApiClient,
    FlussApiClientAuthenticationError,
    FlussApiClientError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify

from .const import LOGGER, UPDATE_INTERVAL_TIMEDELTA

type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]


class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Manages fetching Fluss device data on a schedule."""

    def __init__(
        self, hass: HomeAssistant, config_entry: FlussConfigEntry, api_key: str
    ) -> None:
        """Initialize the coordinator."""
        self.api = FlussApiClient(api_key, session=async_get_clientsession(hass))
        super().__init__(
            hass,
            LOGGER,
            name=f"Fluss+ ({slugify(api_key[:8])})",
            config_entry=config_entry,
            update_interval=UPDATE_INTERVAL_TIMEDELTA,
        )

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Fetch data from the Fluss API and return as a dictionary keyed by deviceId."""
        try:
            devices = await self.api.async_get_devices()
        except FlussApiClientAuthenticationError as err:
            raise ConfigEntryError(f"Authentication failed: {err}") from err
        except FlussApiClientError as err:
            raise UpdateFailed(f"Error fetching Fluss devices: {err}") from err

        return {device["deviceId"]: device for device in devices.get("devices", [])}
39  homeassistant/components/fluss/entity.py  Normal file
@@ -0,0 +1,39 @@
"""Base entities for the Fluss+ integration."""

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import FlussDataUpdateCoordinator


class FlussEntity(CoordinatorEntity[FlussDataUpdateCoordinator]):
    """Base class for Fluss entities."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: FlussDataUpdateCoordinator,
        device_id: str,
        device: dict,
    ) -> None:
        """Initialize the entity with a device ID and device data."""
        super().__init__(coordinator)
        self.device_id = device_id
        self._attr_unique_id = device_id
        self._attr_device_info = DeviceInfo(
            identifiers={("fluss", device_id)},
            name=device.get("deviceName"),
            manufacturer="Fluss",
            model="Fluss+ Device",
        )

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return super().available and self.device_id in self.coordinator.data

    @property
    def device(self) -> dict:
        """Return the stored device data."""
        return self.coordinator.data[self.device_id]
11  homeassistant/components/fluss/manifest.json  Normal file
@@ -0,0 +1,11 @@
{
  "domain": "fluss",
  "name": "Fluss+",
  "codeowners": ["@fluss"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/fluss",
  "iot_class": "cloud_polling",
  "loggers": ["fluss-api"],
  "quality_scale": "bronze",
  "requirements": ["fluss-api==0.1.9.20"]
}
69  homeassistant/components/fluss/quality_scale.yaml  Normal file
@@ -0,0 +1,69 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No actions present
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done
  # Silver
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: todo
  reauthentication-flow: todo
  test-coverage: todo
  # Gold
  entity-translations: done
  entity-device-class: done
  devices: done
  entity-category: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      Not needed
  discovery: todo
  stale-devices: todo
  diagnostics: todo
  exception-translations: todo
  icon-translations:
    status: exempt
    comment: |
      No icons used
  reconfiguration-flow: todo
  dynamic-devices: todo
  discovery-update-info: todo
  repair-issues:
    status: exempt
    comment: |
      No issues to repair
  docs-use-cases: done
  docs-supported-devices: todo
  docs-supported-functions: done
  docs-data-update: todo
  docs-known-limitations: done
  docs-troubleshooting: todo
  docs-examples: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
23  homeassistant/components/fluss/strings.json  Normal file
@@ -0,0 +1,23 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]"
        },
        "data_description": {
          "api_key": "The API key found in the profile page of the Fluss+ app."
        },
        "description": "Your Fluss API key, available in the profile page of the Fluss+ app"
      }
    }
  }
}
@@ -25,7 +25,7 @@ from homeassistant.const import (
EVENT_PANELS_UPDATED,
EVENT_THEMES_UPDATED,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall, async_get_hass, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.icon import async_get_icons
@@ -41,6 +41,7 @@ from .storage import async_setup_frontend_storage
_LOGGER = logging.getLogger(__name__)

DOMAIN = "frontend"
CONF_NAME_DARK = "name_dark"
CONF_THEMES = "themes"
CONF_THEMES_MODES = "modes"
CONF_THEMES_LIGHT = "light"
@@ -526,6 +527,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True


def _validate_selected_theme(theme: str) -> str:
"""Validate that a user selected theme is a valid theme."""
if theme in (DEFAULT_THEME, VALUE_NO_THEME):
return theme
hass = async_get_hass()
if theme not in hass.data[DATA_THEMES]:
raise vol.Invalid(f"Theme {theme} not found")
return theme


async def _async_setup_themes(
hass: HomeAssistant, themes: dict[str, Any] | None
) -> None:
@@ -569,27 +580,32 @@ async def _async_setup_themes(
@callback
def set_theme(call: ServiceCall) -> None:
"""Set backend-preferred theme."""
name = call.data[CONF_NAME]
mode = call.data.get("mode", "light")

if (
name not in (DEFAULT_THEME, VALUE_NO_THEME)
and name not in hass.data[DATA_THEMES]
):
_LOGGER.warning("Theme %s not found", name)
return
def _update_hass_theme(theme: str, light: bool) -> None:
theme_key = DATA_DEFAULT_THEME if light else DATA_DEFAULT_DARK_THEME
if theme == VALUE_NO_THEME:
to_set = DEFAULT_THEME if light else None
else:
_LOGGER.info(
"Theme %s set as default %s theme",
theme,
"light" if light else "dark",
)
to_set = theme
hass.data[theme_key] = to_set

light_mode = mode == "light"

theme_key = DATA_DEFAULT_THEME if light_mode else DATA_DEFAULT_DARK_THEME

if name == VALUE_NO_THEME:
to_set = DEFAULT_THEME if light_mode else None
name = call.data.get(CONF_NAME)
if name is not None and CONF_MODE in call.data:
mode = call.data.get("mode", "light")
light_mode = mode == "light"
_update_hass_theme(name, light_mode)
else:
_LOGGER.info("Theme %s set as default %s theme", name, mode)
to_set = name
name_dark = call.data.get(CONF_NAME_DARK)
if name:
_update_hass_theme(name, True)
if name_dark:
_update_hass_theme(name_dark, False)

hass.data[theme_key] = to_set
store.async_delay_save(
lambda: {
DATA_DEFAULT_THEME: hass.data[DATA_DEFAULT_THEME],
@@ -624,11 +640,13 @@ async def _async_setup_themes(
DOMAIN,
SERVICE_SET_THEME,
set_theme,
vol.Schema(
vol.All(
{
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_MODE): vol.Any("dark", "light"),
}
vol.Optional(CONF_NAME): _validate_selected_theme,
vol.Exclusive(CONF_NAME_DARK, "dark_modes"): _validate_selected_theme,
vol.Exclusive(CONF_MODE, "dark_modes"): vol.Any("dark", "light"),
},
cv.has_at_least_one_key(CONF_NAME, CONF_NAME_DARK),
),
)
@@ -3,17 +3,15 @@
set_theme:
fields:
name:
required: true
required: false
example: "default"
selector:
theme:
include_default: true
mode:
default: "light"
name_dark:
required: false
example: "default"
selector:
select:
options:
- "dark"
- "light"
translation_key: mode
theme:
include_default: true
reload_themes:
@@ -7,32 +7,24 @@
"name": "Winter mode"
}
},
"selector": {
"mode": {
"options": {
"dark": "Dark",
"light": "Light"
}
}
},
"services": {
"reload_themes": {
"description": "Reloads themes from the YAML-configuration.",
"name": "Reload themes"
},
"set_theme": {
"description": "Sets the default theme Home Assistant uses. Can be overridden by a user.",
"description": "Sets the theme Home Assistant uses. Can be overridden by a user.",
"fields": {
"mode": {
"description": "Theme mode.",
"name": "Mode"
},
"name": {
"description": "Name of a theme.",
"description": "Name of the theme that is used by default.",
"name": "Theme"
},
"name_dark": {
"description": "Alternative dark-mode theme that is used by default.",
"name": "Dark theme override"
}
},
"name": "Set the default theme"
"name": "Set theme"
}
}
}
@@ -8,6 +8,6 @@
"integration_type": "system",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["go2rtc-client==0.3.0"],
"requirements": ["go2rtc-client==0.4.0"],
"single_config_entry": true
}

@@ -6,7 +6,7 @@ from collections.abc import Callable

from google_drive_api.exceptions import GoogleDriveApiError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import instance_id
@@ -19,13 +19,13 @@ from homeassistant.util.hass_dict import HassKey

from .api import AsyncConfigEntryAuth, DriveClient
from .const import DOMAIN
from .coordinator import GoogleDriveConfigEntry, GoogleDriveDataUpdateCoordinator

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
f"{DOMAIN}.backup_agent_listeners"
)


type GoogleDriveConfigEntry = ConfigEntry[DriveClient]
_PLATFORMS = (Platform.SENSOR,)


async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry) -> bool:
@@ -41,11 +41,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
await auth.async_get_access_token()

client = DriveClient(await instance_id.async_get(hass), auth)
entry.runtime_data = client

# Test we can access Google Drive and raise if not
try:
await client.async_create_ha_root_folder_if_not_exists()
folder_id, _ = await client.async_create_ha_root_folder_if_not_exists()
except GoogleDriveApiError as err:
raise ConfigEntryNotReady from err

@@ -55,6 +54,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)

entry.async_on_unload(entry.async_on_state_change(async_notify_backup_listeners))

entry.runtime_data = GoogleDriveDataUpdateCoordinator(
hass, entry=entry, client=client, backup_folder_id=folder_id
)
await entry.runtime_data.async_config_entry_first_refresh()

await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)

return True


@@ -62,4 +68,6 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleDriveConfigEntry
) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

return True

@@ -3,6 +3,7 @@
from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from dataclasses import dataclass
import json
import logging
from typing import Any
@@ -27,6 +28,16 @@ _UPLOAD_MAX_RETRIES = 20
_LOGGER = logging.getLogger(__name__)


@dataclass
class StorageQuotaData:
"""Class to represent storage quota data."""

limit: int | None
usage: int
usage_in_drive: int
usage_in_trash: int


class AsyncConfigEntryAuth(AbstractAuth):
"""Provide Google Drive authentication tied to an OAuth2 based config entry."""

@@ -95,6 +106,19 @@ class DriveClient:
res = await self._api.get_user(params={"fields": "user(emailAddress)"})
return str(res["user"]["emailAddress"])

async def async_get_storage_quota(self) -> StorageQuotaData:
"""Get storage quota of the current user."""
res = await self._api.get_user(params={"fields": "storageQuota"})

storageQuota = res["storageQuota"]
limit = storageQuota.get("limit")
return StorageQuotaData(
limit=int(limit) if limit is not None else None,
usage=int(storageQuota.get("usage", 0)),
usage_in_drive=int(storageQuota.get("usageInDrive", 0)),
usage_in_trash=int(storageQuota.get("usageInTrash", 0)),
)

async def async_create_ha_root_folder_if_not_exists(self) -> tuple[str, str]:
"""Create Home Assistant folder if it doesn't exist."""
fields = "id,name"
@@ -178,6 +202,12 @@ class DriveClient:
backups.append(backup)
return backups

async def async_get_size_of_all_backups(self) -> int:
"""Get size of all backups."""
backups = await self.async_list_backups()

return sum(backup.size for backup in backups)

async def async_get_backup_file_id(self, backup_id: str) -> str | None:
"""Get file_id of backup if it exists."""
query = " and ".join(

@@ -68,7 +68,7 @@ class GoogleDriveBackupAgent(BackupAgent):
assert config_entry.unique_id
self.name = config_entry.title
self.unique_id = slugify(config_entry.unique_id)
self._client = config_entry.runtime_data
self._client = config_entry.runtime_data.client

async def async_upload_backup(
self,

@@ -14,10 +14,9 @@ from homeassistant.helpers import config_entry_oauth2_flow, instance_id
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .api import AsyncConfigFlowAuth, DriveClient
from .const import DOMAIN
from .const import DOMAIN, DRIVE_FOLDER_URL_PREFIX

DEFAULT_NAME = "Google Drive"
DRIVE_FOLDER_URL_PREFIX = "https://drive.google.com/drive/folders/"
OAUTH2_SCOPES = [
"https://www.googleapis.com/auth/drive.file",
]

@@ -2,4 +2,9 @@

from __future__ import annotations

from datetime import timedelta

DOMAIN = "google_drive"

SCAN_INTERVAL = timedelta(hours=6)
DRIVE_FOLDER_URL_PREFIX = "https://drive.google.com/drive/folders/"

76
homeassistant/components/google_drive/coordinator.py
Normal file
@@ -0,0 +1,76 @@
"""DataUpdateCoordinator for Google Drive."""

from __future__ import annotations

from dataclasses import dataclass
import logging

from google_drive_api.exceptions import GoogleDriveApiError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .api import DriveClient, StorageQuotaData
from .const import DOMAIN, SCAN_INTERVAL

type GoogleDriveConfigEntry = ConfigEntry[GoogleDriveDataUpdateCoordinator]

_LOGGER = logging.getLogger(__name__)


@dataclass
class SensorData:
"""Class to represent sensor data."""

storage_quota: StorageQuotaData
all_backups_size: int


class GoogleDriveDataUpdateCoordinator(DataUpdateCoordinator[SensorData]):
"""Class to manage fetching Google Drive data from single endpoint."""

client: DriveClient
config_entry: GoogleDriveConfigEntry
email_address: str
backup_folder_id: str

def __init__(
self,
hass: HomeAssistant,
*,
client: DriveClient,
backup_folder_id: str,
entry: GoogleDriveConfigEntry,
) -> None:
"""Initialize Google Drive data updater."""
self.client = client
self.backup_folder_id = backup_folder_id

super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)

async def _async_setup(self) -> None:
"""Do initialization logic."""
self.email_address = await self.client.async_get_email_address()

async def _async_update_data(self) -> SensorData:
"""Fetch data from Google Drive."""
try:
storage_quota = await self.client.async_get_storage_quota()
all_backups_size = await self.client.async_get_size_of_all_backups()
return SensorData(
storage_quota=storage_quota,
all_backups_size=all_backups_size,
)
except GoogleDriveApiError as error:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="invalid_response_google_drive_error",
translation_placeholders={"error": str(error)},
) from error
48
homeassistant/components/google_drive/diagnostics.py
Normal file
@@ -0,0 +1,48 @@
"""Diagnostics support for Google Drive."""

from __future__ import annotations

import dataclasses
from typing import Any

from homeassistant.components.backup import (
DATA_MANAGER as BACKUP_DATA_MANAGER,
BackupManager,
)
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant

from .const import DOMAIN
from .coordinator import GoogleDriveConfigEntry

TO_REDACT = (CONF_ACCESS_TOKEN, "refresh_token")


async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: GoogleDriveConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""

coordinator = entry.runtime_data
backup_manager: BackupManager = hass.data[BACKUP_DATA_MANAGER]

backups = await coordinator.client.async_list_backups()

data = {
"coordinator_data": dataclasses.asdict(coordinator.data),
"config": {
**entry.data,
**entry.options,
},
"backup_folder_id": coordinator.backup_folder_id,
"backup_agents": [
{"name": agent.name}
for agent in backup_manager.backup_agents.values()
if agent.domain == DOMAIN
],
"backup": [backup.as_dict() for backup in backups],
}

return async_redact_data(data, TO_REDACT)
25
homeassistant/components/google_drive/entity.py
Normal file
@@ -0,0 +1,25 @@
"""Define the Google Drive entity."""

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, DRIVE_FOLDER_URL_PREFIX
from .coordinator import GoogleDriveDataUpdateCoordinator


class GoogleDriveEntity(CoordinatorEntity[GoogleDriveDataUpdateCoordinator]):
"""Defines a base Google Drive entity."""

_attr_has_entity_name = True

@property
def device_info(self) -> DeviceInfo:
"""Return device information about this Google Drive device."""
return DeviceInfo(
identifiers={(DOMAIN, str(self.coordinator.config_entry.unique_id))},
name=self.coordinator.email_address,
manufacturer="Google",
model="Google Drive",
configuration_url=f"{DRIVE_FOLDER_URL_PREFIX}{self.coordinator.backup_folder_id}",
entry_type=DeviceEntryType.SERVICE,
)
21
homeassistant/components/google_drive/icons.json
Normal file
@@ -0,0 +1,21 @@
{
"entity": {
"sensor": {
"backups_size": {
"default": "mdi:database"
},
"storage_total": {
"default": "mdi:database"
},
"storage_used": {
"default": "mdi:database"
},
"storage_used_in_drive": {
"default": "mdi:database"
},
"storage_used_in_drive_trash": {
"default": "mdi:database"
}
}
}
}
@@ -3,9 +3,7 @@ rules:
action-setup:
status: exempt
comment: No actions.
appropriate-polling:
status: exempt
comment: No polling.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
@@ -17,12 +15,8 @@ rules:
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: No entities.
entity-unique-id:
status: exempt
comment: No entities.
entity-event-setup: done
entity-unique-id: done
has-entity-name:
status: exempt
comment: No entities.
@@ -38,39 +32,24 @@ rules:
status: exempt
comment: No configuration options.
docs-installation-parameters: done
entity-unavailable:
status: exempt
comment: No entities.
entity-unavailable: done
integration-owner: done
log-when-unavailable:
status: exempt
comment: No entities.
parallel-updates:
status: exempt
comment: No actions and no entities.
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage: done

# Gold
devices:
status: exempt
comment: No devices.
diagnostics:
status: exempt
comment: No data to diagnose.
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: No discovery.
discovery:
status: exempt
comment: No discovery.
docs-data-update:
status: exempt
comment: No updates.
docs-examples:
status: exempt
comment: |
This integration only serves backup.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
@@ -79,20 +58,13 @@ rules:
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: No devices.
entity-category:
status: exempt
comment: No entities.
entity-device-class:
status: exempt
comment: No entities.
entity-disabled-by-default:
status: exempt
comment: No entities.
entity-translations:
status: exempt
comment: No entities.
status: done
comment: |
This integration has a fixed single service.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations:
status: exempt
@@ -104,8 +76,9 @@ rules:
status: exempt
comment: No repairs.
stale-devices:
status: exempt
comment: No devices.
status: done
comment: |
This integration has a fixed single service.

# Platinum
async-dependency: done

127
homeassistant/components/google_drive/sensor.py
Normal file
@@ -0,0 +1,127 @@
"""Support for GoogleDrive sensors."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass

from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.const import EntityCategory, UnitOfInformation
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .coordinator import (
GoogleDriveConfigEntry,
GoogleDriveDataUpdateCoordinator,
SensorData,
)
from .entity import GoogleDriveEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class GoogleDriveSensorEntityDescription(SensorEntityDescription):
"""Describes GoogleDrive sensor entity."""

exists_fn: Callable[[SensorData], bool] = lambda _: True
value_fn: Callable[[SensorData], StateType]


SENSORS: tuple[GoogleDriveSensorEntityDescription, ...] = (
GoogleDriveSensorEntityDescription(
key="storage_total",
translation_key="storage_total",
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.storage_quota.limit,
exists_fn=lambda data: data.storage_quota.limit is not None,
),
GoogleDriveSensorEntityDescription(
key="storage_used",
translation_key="storage_used",
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.storage_quota.usage,
),
GoogleDriveSensorEntityDescription(
key="storage_used_in_drive",
translation_key="storage_used_in_drive",
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.storage_quota.usage_in_drive,
entity_registry_enabled_default=False,
),
GoogleDriveSensorEntityDescription(
key="storage_used_in_drive_trash",
translation_key="storage_used_in_drive_trash",
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.GIBIBYTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.storage_quota.usage_in_trash,
entity_registry_enabled_default=False,
),
GoogleDriveSensorEntityDescription(
key="backups_size",
translation_key="backups_size",
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEBIBYTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.all_backups_size,
entity_registry_enabled_default=False,
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: GoogleDriveConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up GoogleDrive sensor based on a config entry."""
coordinator = entry.runtime_data
async_add_entities(
GoogleDriveSensorEntity(coordinator, description)
for description in SENSORS
if description.exists_fn(coordinator.data)
)


class GoogleDriveSensorEntity(GoogleDriveEntity, SensorEntity):
"""Defines a Google Drive sensor entity."""

entity_description: GoogleDriveSensorEntityDescription

def __init__(
self,
coordinator: GoogleDriveDataUpdateCoordinator,
description: GoogleDriveSensorEntityDescription,
) -> None:
"""Initialize a Google Drive sensor entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{description.key}"

@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.data)
@@ -42,5 +42,24 @@
"title": "[%key:common::config_flow::title::reauth%]"
}
}
},
"entity": {
"sensor": {
"backups_size": {
"name": "Total size of backups"
},
"storage_total": {
"name": "Total available storage"
},
"storage_used": {
"name": "Used storage"
},
"storage_used_in_drive": {
"name": "Used storage in Drive"
},
"storage_used_in_drive_trash": {
"name": "Used storage in Drive Trash"
}
}
}
}

@@ -6,5 +6,5 @@
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/govee_light_local",
"iot_class": "local_push",
"requirements": ["govee-local-api==2.2.0"]
"requirements": ["govee-local-api==2.3.0"]
}

@@ -44,11 +44,16 @@ class HomeWizardBatteryModeSelectEntity(HomeWizardEntity, SelectEntity):
"""Initialize the switch."""
super().__init__(coordinator)

batteries = coordinator.data.batteries
battery_count = batteries.battery_count if batteries is not None else None
entity_registry_enabled_default = (
battery_count is not None and battery_count > 0
)
description = SelectEntityDescription(
key="battery_group_mode",
translation_key="battery_group_mode",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
entity_registry_enabled_default=entity_registry_enabled_default,
options=[
str(mode)
for mode in (coordinator.data.device.supported_battery_modes() or [])

@@ -198,6 +198,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"dlbandwidth": HuaweiSensorEntityDescription(
key="dlbandwidth",
translation_key="downlink_bandwidth",
# https://en.wikipedia.org/wiki/LTE_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((8, 15), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
@@ -216,7 +217,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="ecio",
translation_key="ecio",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika.lt/view/EC/IO
# https://wiki.teltonika-networks.com/view/EC/IO
icon_fn=lambda x: signal_icon((-20, -10, -6), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
@@ -291,8 +292,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"nrdlbandwidth": HuaweiSensorEntityDescription(
key="nrdlbandwidth",
translation_key="nrdlbandwidth",
# Could add icon_fn like we have for dlbandwidth,
# if we find a good source what to use as 5G thresholds.
# https://en.wikipedia.org/wiki/5G_NR_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((33, 66), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
"nrdlmcs": HuaweiSensorEntityDescription(
@@ -314,7 +315,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrrsrp",
translation_key="nrrsrp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# Could add icon_fn as in rsrp, source for 5G thresholds?
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-100, -90, -80), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -323,7 +325,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrrsrq",
translation_key="nrrsrq",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# Could add icon_fn as in rsrq, source for 5G thresholds?
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-20, -15, -10), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -332,7 +335,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrsinr",
translation_key="nrsinr",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# Could add icon_fn as in sinr, source for thresholds?
# https://wiki.teltonika-networks.com/view/SINR
icon_fn=lambda x: signal_icon((0, 13, 20), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -354,7 +358,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"nrulbandwidth": HuaweiSensorEntityDescription(
key="nrulbandwidth",
translation_key="nrulbandwidth",
# Could add icon_fn as in ulbandwidth, source for 5G thresholds?
# https://en.wikipedia.org/wiki/5G_NR_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((33, 66), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
"nrulmcs": HuaweiSensorEntityDescription(
@@ -386,7 +391,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rscp",
translation_key="rscp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika.lt/view/RSCP
# https://wiki.teltonika-networks.com/view/RSCP
icon_fn=lambda x: signal_icon((-95, -85, -75), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
@@ -395,8 +400,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rsrp",
translation_key="rsrp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/rsrp.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((-110, -95, -80), x),
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-100, -90, -80), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -405,8 +410,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rsrq",
translation_key="rsrq",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/rsrq.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((-11, -8, -5), x),
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-20, -15, -10), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -415,8 +420,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rssi",
translation_key="rssi",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://eyesaas.com/wi-fi-signal-strength/
icon_fn=lambda x: signal_icon((-80, -70, -60), x),
# https://wiki.teltonika-networks.com/view/RSSI
icon_fn=lambda x: signal_icon((-95, -85, -75), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -436,8 +441,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="sinr",
translation_key="sinr",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# http://www.lte-anbieter.info/technik/sinr.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((0, 5, 10), x),
# https://wiki.teltonika-networks.com/view/SINR
icon_fn=lambda x: signal_icon((0, 13, 20), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -479,6 +484,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"ulbandwidth": HuaweiSensorEntityDescription(
key="ulbandwidth",
translation_key="uplink_bandwidth",
# https://en.wikipedia.org/wiki/LTE_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((8, 15), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
@@ -773,10 +779,15 @@ async def async_setup_entry(
continue
if key_meta := SENSOR_META.get(key):
if key_meta.include:
items = filter(key_meta.include.search, items)
items = {k: v for k, v in items.items() if key_meta.include.search(k)}
if key_meta.exclude:
items = [x for x in items if not key_meta.exclude.search(x)]
for item in items:
items = {
k: v for k, v in items.items() if not key_meta.exclude.search(k)
}
for item, value in items.items():
if value is None:
_LOGGER.debug("Ignoring sensor %s.%s due to None value", key, item)
continue
if not (desc := SENSOR_META[key].descriptions.get(item)):
_LOGGER.debug( # pylint: disable=hass-logger-period # false positive
(

@@ -50,6 +50,12 @@
}
},
"triggers": {
"current_humidity_changed": {
"trigger": "mdi:water-percent"
},
"current_humidity_crossed_threshold": {
"trigger": "mdi:water-percent"
},
"started_drying": {
"trigger": "mdi:arrow-down-bold"
},

@@ -91,12 +91,26 @@
}
},
"selector": {
"number_or_entity": {
"choices": {
"entity": "Entity",
"number": "Number"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
},
"trigger_threshold_type": {
"options": {
"above": "Above a value",
"below": "Below a value",
"between": "In a range",
"outside": "Outside a range"
}
}
},
"services": {
@@ -135,6 +149,42 @@
},
"title": "Humidifier",
"triggers": {
"current_humidity_changed": {
"description": "Triggers after the humidity measured by one or more humidifiers changes.",
"fields": {
"above": {
"description": "Trigger when the humidity is above this value.",
"name": "Above"
},
"below": {
"description": "Trigger when the humidity is below this value.",
"name": "Below"
}
},
"name": "Humidifier current humidity changed"
},
"current_humidity_crossed_threshold": {
"description": "Triggers after the humidity measured by one or more humidifiers crosses a threshold.",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
},
"lower_limit": {
"description": "Lower threshold limit.",
"name": "Lower threshold"
},
"threshold_type": {
"description": "Type of threshold crossing to trigger on.",
"name": "Threshold type"
},
"upper_limit": {
"description": "Upper threshold limit.",
"name": "Upper threshold"
}
},
"name": "Humidifier current humidity crossed threshold"
},
"started_drying": {
"description": "Triggers after one or more humidifiers start drying.",
"fields": {

@@ -4,13 +4,21 @@ from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
Trigger,
make_entity_numerical_state_attribute_changed_trigger,
make_entity_numerical_state_attribute_crossed_threshold_trigger,
make_entity_target_state_attribute_trigger,
make_entity_target_state_trigger,
)

from .const import ATTR_ACTION, DOMAIN, HumidifierAction
from .const import ATTR_ACTION, ATTR_CURRENT_HUMIDITY, DOMAIN, HumidifierAction

TRIGGERS: dict[str, type[Trigger]] = {
"current_humidity_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_CURRENT_HUMIDITY
),
"current_humidity_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
DOMAIN, ATTR_CURRENT_HUMIDITY
),
"started_drying": make_entity_target_state_attribute_trigger(
DOMAIN, ATTR_ACTION, HumidifierAction.DRYING
),

@@ -1,9 +1,9 @@
.trigger_common: &trigger_common
target:
target: &trigger_humidifier_target
entity:
domain: humidifier
fields:
behavior:
behavior: &trigger_behavior
required: true
default: any
selector:
@@ -14,7 +14,51 @@
- last
- any

.number_or_entity: &number_or_entity
required: false
selector:
choose:
choices:
entity:
selector:
entity:
filter:
domain:
- input_number
- number
- sensor
number:
selector:
number:
mode: box
translation_key: number_or_entity

.trigger_threshold_type: &trigger_threshold_type
required: true
selector:
select:
options:
- above
- below
- between
- outside
translation_key: trigger_threshold_type

started_drying: *trigger_common
started_humidifying: *trigger_common
turned_on: *trigger_common
turned_off: *trigger_common

current_humidity_changed:
target: *trigger_humidifier_target
fields:
above: *number_or_entity
below: *number_or_entity

current_humidity_crossed_threshold:
target: *trigger_humidifier_target
fields:
behavior: *trigger_behavior
threshold_type: *trigger_threshold_type
lower_limit: *number_or_entity
upper_limit: *number_or_entity

@@ -47,7 +47,7 @@ class HuumSteamer(HuumBaseEntity, NumberEntity):
@property
def native_value(self) -> float:
"""Return the current value."""
return self.coordinator.data.humidity
return self.coordinator.data.target_humidity

async def async_set_native_value(self, value: float) -> None:
"""Update the current value."""

@@ -168,6 +168,7 @@ SUPPORTED_PLATFORMS_UI: Final = {
Platform.FAN,
Platform.DATETIME,
Platform.LIGHT,
Platform.SCENE,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
@@ -227,3 +228,9 @@ class FanConf:
"""Common config keys for fan."""

MAX_STEP: Final = "max_step"


class SceneConf:
"""Common config keys for scene."""

SCENE_NUMBER: Final = "scene_number"

@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.13.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.19.150946"
"knx-frontend==2025.12.24.74016"
],
"single_config_entry": true
}

@@ -110,13 +110,6 @@ def _data_secure_group_key_issue_handler(
class DataSecureGroupIssueRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow for outdated DataSecure keys."""

@callback
def _async_get_placeholders(self) -> dict[str, str]:
issue_registry = ir.async_get(self.hass)
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
assert issue is not None
return issue.translation_placeholders or {}

async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
@@ -157,7 +150,6 @@ class DataSecureGroupIssueRepairFlow(RepairsFlow):
return self.async_show_form(
step_id="secure_knxkeys",
data_schema=vol.Schema(fields),
description_placeholders=self._async_get_placeholders(),
errors=errors,
)


@@ -10,13 +10,23 @@ from homeassistant import config_entries
from homeassistant.components.scene import BaseScene
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.typing import ConfigType

from .const import KNX_ADDRESS, KNX_MODULE_KEY
from .entity import KnxYamlEntity
from .const import DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, SceneConf
from .entity import (
KnxUiEntity,
KnxUiEntityPlatformController,
KnxYamlEntity,
_KnxEntityBase,
)
from .knx_module import KNXModule
from .schema import SceneSchema
from .storage.const import CONF_ENTITY, CONF_GA_SCENE
from .storage.util import ConfigExtractor


async def async_setup_entry(
@@ -26,18 +36,53 @@ async def async_setup_entry(
) -> None:
"""Set up scene(s) for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
config: list[ConfigType] = knx_module.config_yaml[Platform.SCENE]
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.SCENE,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiScene,
),
)

async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config)
entities: list[KnxYamlEntity | KnxUiEntity] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.SCENE):
entities.extend(
KnxYamlScene(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.SCENE):
entities.extend(
KnxUiScene(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
if entities:
async_add_entities(entities)


class KNXScene(KnxYamlEntity, BaseScene):
class _KnxScene(BaseScene, _KnxEntityBase):
"""Representation of a KNX scene."""

_device: XknxScene

async def _async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()

def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
self._async_record_activation()
super().after_update_callback(device)


class KnxYamlScene(_KnxScene, KnxYamlEntity):
"""Representation of a KNX scene configured from YAML."""

_device: XknxScene

def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Init KNX scene."""
"""Initialize KNX scene."""
super().__init__(
knx_module=knx_module,
device=XknxScene(
@@ -52,11 +97,28 @@ class KNXScene(KnxYamlEntity, BaseScene):
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
)

async def _async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()

def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
self._async_record_activation()
super().after_update_callback(device)
class KnxUiScene(_KnxScene, KnxUiEntity):
"""Representation of a KNX scene configured from the UI."""

_device: XknxScene

def __init__(
self,
knx_module: KNXModule,
unique_id: str,
config: ConfigType,
) -> None:
"""Initialize KNX scene."""
super().__init__(
knx_module=knx_module,
unique_id=unique_id,
entity_config=config[CONF_ENTITY],
)
knx_conf = ConfigExtractor(config[DOMAIN])
self._device = XknxScene(
xknx=knx_module.xknx,
name=config[CONF_ENTITY][CONF_NAME],
group_address=knx_conf.get_write(CONF_GA_SCENE),
scene_number=knx_conf.get(SceneConf.SCENE_NUMBER),
)

@@ -61,6 +61,7 @@ from .const import (
CoverConf,
FanConf,
FanZeroMode,
SceneConf,
)
from .validation import (
backwards_compatible_xknx_climate_enum_member,
@@ -822,7 +823,7 @@ class SceneSchema(KNXPlatformSchema):
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Required(CONF_SCENE_NUMBER): vol.All(
vol.Required(SceneConf.SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,

@@ -72,5 +72,8 @@ CONF_GA_WHITE_SWITCH: Final = "ga_white_switch"
|
||||
CONF_GA_HUE: Final = "ga_hue"
|
||||
CONF_GA_SATURATION: Final = "ga_saturation"
|
||||
|
||||
# Scene
|
||||
CONF_GA_SCENE: Final = "ga_scene"
|
||||
|
||||
# Sensor
|
||||
CONF_ALWAYS_CALLBACK: Final = "always_callback"
|
||||
|
||||
@@ -40,6 +40,7 @@ from ..const import (
CoverConf,
FanConf,
FanZeroMode,
SceneConf,
)
from ..dpt import get_supported_dpts
from .const import (
@@ -82,6 +83,7 @@ from .const import (
CONF_GA_RED_BRIGHTNESS,
CONF_GA_RED_SWITCH,
CONF_GA_SATURATION,
CONF_GA_SCENE,
CONF_GA_SENSOR,
CONF_GA_SETPOINT_SHIFT,
CONF_GA_SPEED,
@@ -419,6 +421,25 @@ LIGHT_KNX_SCHEMA = AllSerializeFirst(
),
)

SCENE_KNX_SCHEMA = vol.Schema(
{
vol.Required(CONF_GA_SCENE): GASelector(
state=False,
passive=False,
write_required=True,
valid_dpt=["17.001", "18.001"],
),
vol.Required(SceneConf.SCENE_NUMBER): AllSerializeFirst(
selector.NumberSelector(
selector.NumberSelectorConfig(
min=1, max=64, step=1, mode=selector.NumberSelectorMode.BOX
)
),
vol.Coerce(int),
),
},
)

SWITCH_KNX_SCHEMA = vol.Schema(
{
vol.Required(CONF_GA_SWITCH): GASelector(write_required=True, valid_dpt="1"),
@@ -694,6 +715,7 @@ KNX_SCHEMA_FOR_PLATFORM = {
Platform.DATETIME: DATETIME_KNX_SCHEMA,
Platform.FAN: FAN_KNX_SCHEMA,
Platform.LIGHT: LIGHT_KNX_SCHEMA,
Platform.SCENE: SCENE_KNX_SCHEMA,
Platform.SENSOR: SENSOR_KNX_SCHEMA,
Platform.SWITCH: SWITCH_KNX_SCHEMA,
Platform.TIME: TIME_KNX_SCHEMA,

@@ -347,8 +347,16 @@
},
"knx": {
"knx_group_address": {
"add_passive_address": "+ Add passive address",
"dpt": "Datapoint type",
"passive_addresses": "Passive addresses",
"dpt_incompatible": "DPT {dpt} is incompatible with this field's expected DPTs.",
"dpt_no_selection": "No DPT selected",
"dpt_select": "Select DPT",
"group_address_none_for_dpt": "No group addresses known for expected DPT",
"group_address_none_for_filter": "No group addresses known for current filter",
"group_address_search": "Search group address",
"group_address_unknown": "Unknown group address",
"passive_address": "Passive address",
"send_address": "Send address",
"state_address": "State address",
"valid_dpts": "Valid DPTs"
@@ -774,6 +782,19 @@
}
}
},
"scene": {
"description": "A KNX entity can activate a KNX scene and updates when the scene number is received.",
"knx": {
"ga_scene": {
"description": "Group address to activate a scene.",
"label": "Scene"
},
"scene_number": {
"description": "The scene number this entity is associated with.",
"label": "Scene number"
}
}
},
"sensor": {
"description": "Read-only entity for numeric or string datapoints. Temperature, percent etc.",
"knx": {

@@ -32,15 +32,15 @@ async def async_migrate_entry(
entity_registry, config_entry.entry_id
)
for reg_entry in registry_entries:
new_entity_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}"
new_unique_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}"
_LOGGER.debug(
"Migrating entity %s unique id from %s to %s",
reg_entry.entity_id,
reg_entry.unique_id,
new_entity_id,
new_unique_id,
)
entity_registry.async_update_entity(
reg_entry.entity_id, new_unique_id=new_entity_id
reg_entry.entity_id, new_unique_id=new_unique_id
)

# Migrate device identifiers

@@ -46,7 +46,7 @@ class LibreHardwareMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
)

try:
_ = (await api.get_data()).main_device_ids_and_names.values()
computer_name = (await api.get_data()).computer_name
except LibreHardwareMonitorConnectionError as exception:
_LOGGER.error(exception)
errors["base"] = "cannot_connect"
@@ -54,7 +54,7 @@ class LibreHardwareMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "no_devices"
else:
return self.async_create_entry(
title=f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}",
title=f"{computer_name} ({user_input[CONF_HOST]}:{user_input[CONF_PORT]})",
data=user_input,
)


@@ -65,7 +65,7 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
lhm_data = await self._api.get_data()
except LibreHardwareMonitorConnectionError as err:
raise UpdateFailed(
"LibreHardwareMonitor connection failed, will retry"
"LibreHardwareMonitor connection failed, will retry", retry_after=30
) from err
except LibreHardwareMonitorNoDevicesError as err:
raise UpdateFailed("No sensor data available, will retry") from err

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.5.0"]
"requirements": ["librehardwaremonitor-api==1.6.0"]
}

@@ -66,7 +66,7 @@ class LibreHardwareMonitorSensor(
# Hardware device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{entry_id}_{sensor_data.device_id}")},
name=sensor_data.device_name,
name=f"[{coordinator.data.computer_name}] {sensor_data.device_name}",
model=sensor_data.device_type,
)


@@ -35,6 +35,12 @@
}
},
"triggers": {
"brightness_changed": {
"trigger": "mdi:lightbulb-on-50"
},
"brightness_crossed_threshold": {
"trigger": "mdi:lightbulb-on-50"
},
"turned_off": {
"trigger": "mdi:lightbulb-off"
},

@@ -322,6 +322,12 @@
"short": "Short"
}
},
"number_or_entity": {
"choices": {
"entity": "Entity",
"number": "Number"
}
},
"state": {
"options": {
"off": "[%key:common::state::off%]",
@@ -334,6 +340,14 @@
"first": "First",
"last": "Last"
}
},
"trigger_threshold_type": {
"options": {
"above": "Above a value",
"below": "Below a value",
"between": "In a range",
"outside": "Outside a range"
}
}
},
"services": {
@@ -509,6 +523,42 @@
},
"title": "Light",
"triggers": {
"brightness_changed": {
"description": "Triggers after the brightness of one or more lights changes.",
"fields": {
"above": {
"description": "Trigger when the target brightness is above this value.",
"name": "Above"
},
"below": {
"description": "Trigger when the target brightness is below this value.",
"name": "Below"
}
},
"name": "Light brightness changed"
},
"brightness_crossed_threshold": {
"description": "Triggers after the brightness of one or more lights crosses a threshold.",
"fields": {
"behavior": {
"description": "[%key:component::light::common::trigger_behavior_description%]",
"name": "[%key:component::light::common::trigger_behavior_name%]"
},
"lower_limit": {
"description": "Lower threshold limit.",
"name": "Lower threshold"
},
"threshold_type": {
"description": "Type of threshold crossing to trigger on.",
"name": "Threshold type"
},
"upper_limit": {
"description": "Upper threshold limit.",
"name": "Upper threshold"
}
},
"name": "Light brightness crossed threshold"
},
"turned_off": {
"description": "Triggers after one or more lights turn off.",
"fields": {

@@ -2,11 +2,23 @@

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_target_state_trigger
from homeassistant.helpers.trigger import (
Trigger,
make_entity_numerical_state_attribute_changed_trigger,
make_entity_numerical_state_attribute_crossed_threshold_trigger,
make_entity_target_state_trigger,
)

from . import ATTR_BRIGHTNESS
from .const import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
"brightness_changed": make_entity_numerical_state_attribute_changed_trigger(
DOMAIN, ATTR_BRIGHTNESS
),
"brightness_crossed_threshold": make_entity_numerical_state_attribute_crossed_threshold_trigger(
DOMAIN, ATTR_BRIGHTNESS
),
"turned_off": make_entity_target_state_trigger(DOMAIN, STATE_OFF),
"turned_on": make_entity_target_state_trigger(DOMAIN, STATE_ON),
}

@@ -1,9 +1,9 @@
.trigger_common: &trigger_common
target:
target: &trigger_light_target
entity:
domain: light
fields:
behavior:
behavior: &trigger_behavior
required: true
default: any
selector:
@@ -14,5 +14,47 @@
- any
translation_key: trigger_behavior

.number_or_entity: &number_or_entity
required: false
selector:
choose:
choices:
entity:
selector:
entity:
filter:
domain:
- input_number
- number
- sensor
number:
selector:
number:
mode: box
translation_key: number_or_entity

turned_on: *trigger_common
turned_off: *trigger_common

brightness_changed:
target: *trigger_light_target
fields:
above: *number_or_entity
below: *number_or_entity

brightness_crossed_threshold:
target: *trigger_light_target
fields:
behavior: *trigger_behavior
threshold_type:
required: true
selector:
select:
options:
- above
- below
- between
- outside
translation_key: trigger_threshold_type
lower_limit: *number_or_entity
upper_limit: *number_or_entity

@@ -6,5 +6,5 @@
"iot_class": "cloud_polling",
"loggers": ["melissa"],
"quality_scale": "legacy",
"requirements": ["py-melissa-climate==3.0.2"]
"requirements": ["py-melissa-climate==3.0.3"]
}

@@ -4,6 +4,7 @@
"codeowners": ["@elmurato"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/minecraft_server",
"integration_type": "service",
"iot_class": "local_polling",
"loggers": ["dnspython", "mcstatus"],
"quality_scale": "silver",

@@ -4,6 +4,7 @@
"codeowners": ["@meichthys"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/mullvad",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["mullvad-api==1.0.0"],
"single_config_entry": true

@@ -5,6 +5,7 @@
"codeowners": ["@MartinHjelmare", "@functionpointer"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/mysensors",
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["mysensors"],
"requirements": ["pymysensors==0.26.0"]

@@ -17,6 +17,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/nest",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"requirements": ["google-nest-sdm==9.1.2"]

@@ -29,9 +29,13 @@ CONF_SENSOR_ID = "sensor_id"
|
||||
|
||||
ACTIVE_NAME = "Energy Usage"
|
||||
DAILY_NAME = "Daily Energy Usage"
|
||||
ACTIVE_GENERATION_NAME = "Energy Production"
|
||||
DAILY_GENERATION_NAME = "Daily Energy Production"
|
||||
|
||||
ACTIVE_TYPE = "active"
|
||||
DAILY_TYPE = "daily"
|
||||
ACTIVE_GENERATION_TYPE = "active_generation"
|
||||
DAILY_GENERATION_TYPE = "daily_generation"
|
||||
|
||||
|
||||
MIN_TIME_BETWEEN_DAILY_UPDATES = timedelta(seconds=150)
|
||||
@@ -76,6 +80,18 @@ def setup_platform(
|
||||
add_entities([NeurioEnergy(data, ACTIVE_NAME, ACTIVE_TYPE, update_active)])
|
||||
# Daily power sensor
|
||||
add_entities([NeurioEnergy(data, DAILY_NAME, DAILY_TYPE, update_daily)])
|
||||
# Active generation sensor
|
||||
add_entities(
|
||||
[
|
||||
NeurioEnergy(
|
||||
data, ACTIVE_GENERATION_NAME, ACTIVE_GENERATION_TYPE, update_active
|
||||
)
|
||||
]
|
||||
)
|
||||
# Daily generation sensor
|
||||
add_entities(
|
||||
[NeurioEnergy(data, DAILY_GENERATION_NAME, DAILY_GENERATION_TYPE, update_daily)]
|
||||
)
|
||||
|
||||
|
||||
class NeurioData:
@@ -89,6 +105,8 @@ class NeurioData:

self._daily_usage = None
self._active_power = None
self._daily_generation = None
self._active_generation = None

self._state = None

@@ -105,17 +123,29 @@ class NeurioData:
"""Return latest active power value."""
return self._active_power

@property
def daily_generation(self):
"""Return latest daily generation value."""
return self._daily_generation

@property
def active_generation(self):
"""Return latest active generation value."""
return self._active_generation

def get_active_power(self) -> None:
"""Return current power value."""
"""Update current power values."""
try:
sample = self.neurio_client.get_samples_live_last(self.sensor_id)
self._active_power = sample["consumptionPower"]
self._active_generation = sample.get("generationPower")
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update current power usage")

def get_daily_usage(self) -> None:
"""Return current daily power usage."""
"""Update current daily power usage and generation."""
kwh = 0
gen_kwh = 0
start_time = dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat()
end_time = dt_util.utcnow().isoformat()

@@ -131,8 +161,10 @@ class NeurioData:

for result in history:
kwh += result["consumptionEnergy"] / 3600000
gen_kwh += result.get("generationEnergy", 0) / 3600000

self._daily_usage = round(kwh, 2)
self._daily_generation = round(gen_kwh, 2)


class NeurioEnergy(SensorEntity):
|
||||
@@ -156,6 +188,16 @@ class NeurioEnergy(SensorEntity):
|
||||
self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
|
||||
self._attr_device_class = SensorDeviceClass.ENERGY
|
||||
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
|
||||
elif sensor_type == ACTIVE_GENERATION_TYPE:
|
||||
self._attr_icon = "mdi:solar-power"
|
||||
self._unit_of_measurement = UnitOfPower.WATT
|
||||
self._attr_device_class = SensorDeviceClass.POWER
|
||||
self._attr_state_class = SensorStateClass.MEASUREMENT
|
||||
elif sensor_type == DAILY_GENERATION_TYPE:
|
||||
self._attr_icon = "mdi:solar-power"
|
||||
self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
|
||||
self._attr_device_class = SensorDeviceClass.ENERGY
|
||||
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -180,3 +222,7 @@ class NeurioEnergy(SensorEntity):
|
||||
self._state = self._data.active_power
|
||||
elif self._sensor_type == DAILY_TYPE:
|
||||
self._state = self._data.daily_usage
|
||||
elif self._sensor_type == ACTIVE_GENERATION_TYPE:
|
||||
self._state = self._data.active_generation
|
||||
elif self._sensor_type == DAILY_GENERATION_TYPE:
|
||||
self._state = self._data.daily_generation
|
||||
|
||||
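
The daily totals above are accumulated in watt-seconds and converted to kilowatt-hours by dividing by 3,600,000 (1 kWh = 3.6 MJ). A minimal standalone sketch of that accumulation, using a made-up history payload in place of the Neurio samples API response:

# Standalone sketch of the kWh accumulation above; `history` is a hypothetical
# stand-in for the Neurio samples API response (energy values in watt-seconds).
history = [
    {"consumptionEnergy": 5_400_000, "generationEnergy": 1_800_000},
    {"consumptionEnergy": 7_200_000},  # generation key may be absent
]

kwh = 0.0
gen_kwh = 0.0
for result in history:
    kwh += result["consumptionEnergy"] / 3600000
    gen_kwh += result.get("generationEnergy", 0) / 3600000

print(round(kwh, 2), round(gen_kwh, 2))  # 3.5 0.5
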
@@ -7,5 +7,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["pynintendoauth", "pynintendoparental"],
  "quality_scale": "bronze",
  "requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.1.3"]
  "requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.0"]
}
@@ -115,6 +115,12 @@ async def async_setup_entry(
            if entity.enabled:
                await entity.query_state()

    async def disconnect_callback() -> None:
        for entity in entities.values():
            if entity.enabled:
                entity.cancel_tasks()
                entity.async_write_ha_state()

    async def update_callback(message: Status) -> None:
        if isinstance(message, status.Raw):
            return
@@ -146,6 +152,7 @@ async def async_setup_entry(
        async_add_entities([zone_entity])

    manager.callbacks.connect.append(connect_callback)
    manager.callbacks.disconnect.append(disconnect_callback)
    manager.callbacks.update.append(update_callback)


@@ -225,13 +232,13 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
            await self.query_state()

    async def async_will_remove_from_hass(self) -> None:
        """Cancel the tasks when the entity is removed."""
        if self._query_state_task is not None:
            self._query_state_task.cancel()
            self._query_state_task = None
        if self._query_av_info_task is not None:
            self._query_av_info_task.cancel()
            self._query_av_info_task = None
        """Entity will be removed from hass."""
        self.cancel_tasks()

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return self._manager.connected

    async def query_state(self) -> None:
        """Query the receiver for all the info, that we care about."""
@@ -247,6 +254,15 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
        await self._manager.write(query.AudioInformation())
        await self._manager.write(query.VideoInformation())

    def cancel_tasks(self) -> None:
        """Cancel the tasks."""
        if self._query_state_task is not None:
            self._query_state_task.cancel()
            self._query_state_task = None
        if self._query_av_info_task is not None:
            self._query_av_info_task.cancel()
            self._query_av_info_task = None

    async def async_turn_on(self) -> None:
        """Turn the media player on."""
        message = command.Power(self._zone, command.Power.Param.ON)

@@ -30,9 +30,9 @@ rules:
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: todo
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: todo
  log-when-unavailable: done
  parallel-updates: todo
  reauthentication-flow:
    status: exempt
@@ -28,11 +28,13 @@ class Callbacks:
    """Receiver callbacks."""

    connect: list[Callable[[bool], Awaitable[None]]] = field(default_factory=list)
    disconnect: list[Callable[[], Awaitable[None]]] = field(default_factory=list)
    update: list[Callable[[Status], Awaitable[None]]] = field(default_factory=list)

    def clear(self) -> None:
        """Clear all callbacks."""
        self.connect.clear()
        self.disconnect.clear()
        self.update.clear()


@@ -43,6 +45,7 @@ class ReceiverManager:
    entry: OnkyoConfigEntry
    info: ReceiverInfo
    receiver: Receiver | None = None
    connected: bool = False
    callbacks: Callbacks

    _started: asyncio.Event
@@ -83,6 +86,7 @@ class ReceiverManager:
        while True:
            try:
                async with connect(self.info, retry=reconnect) as self.receiver:
                    self.connected = True
                    if not reconnect:
                        self._started.set()
                    else:
@@ -96,7 +100,9 @@
                reconnect = True

            finally:
                self.connected = False
                _LOGGER.info("Disconnected: %s", self.info)
                await self.on_disconnect()

    async def on_connect(self, reconnect: bool) -> None:
        """Receiver (re)connected."""
@@ -109,8 +115,13 @@ class ReceiverManager:
        for callback in self.callbacks.connect:
            await callback(reconnect)

    async def on_disconnect(self) -> None:
        """Receiver disconnected."""
        for callback in self.callbacks.disconnect:
            await callback()

    async def on_update(self, message: Status) -> None:
        """Process new message from the receiver."""
        """New message from the receiver."""
        for callback in self.callbacks.update:
            await callback(message)
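
The Callbacks container above is just a dataclass holding lists of async callables that the manager awaits in turn on connect, disconnect, and update. A rough standalone sketch of that registration pattern (names here are illustrative, not the integration's actual API):

import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass, field


@dataclass
class DemoCallbacks:
    # Illustrative mirror of the Callbacks dataclass: a list of async callables.
    disconnect: list[Callable[[], Awaitable[None]]] = field(default_factory=list)


async def main() -> None:
    callbacks = DemoCallbacks()

    async def on_disconnect() -> None:
        print("entity marked unavailable")

    callbacks.disconnect.append(on_disconnect)

    # On a dropped connection the manager awaits each registered callback in order.
    for callback in callbacks.disconnect:
        await callback()


asyncio.run(main())
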
@@ -78,7 +78,7 @@
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/playstation_network",
  "integration_type": "service",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["PSNAWP==3.0.1", "pyrate-limiter==3.9.0"]

@@ -20,5 +20,5 @@
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "quality_scale": "platinum",
  "requirements": ["reolink-aio==0.17.1"]
  "requirements": ["reolink-aio==0.18.0"]
}

@@ -122,6 +122,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
        # Tracks the last successful update to control when we report failure
        # to the base class. This is reset on successful data update.
        self._last_update_success_time: datetime | None = None
        self._has_connected_locally: bool = False

    @cached_property
    def dock_device_info(self) -> DeviceInfo:
@@ -191,7 +192,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
    async def _verify_api(self) -> None:
        """Verify that the api is reachable."""
        if self._device.is_connected:
        if self._device.is_local_connected:
            self._has_connected_locally |= self._device.is_local_connected
            if self._has_connected_locally:
                async_delete_issue(
                    self.hass, DOMAIN, f"cloud_api_used_{self.duid_slug}"
                )
@@ -234,6 +236,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):

    async def _async_update_data(self) -> DeviceState:
        """Update data via library."""
        await self._verify_api()
        try:
            # Update device props and standard api information
            await self._update_device_prop()
@@ -13,7 +13,22 @@ from .coordinator import RoborockConfigEntry

_LOGGER = logging.getLogger(__name__)

TO_REDACT_CONFIG = ["token", "sn", "rruid", CONF_UNIQUE_ID, "username", "uid"]
TO_REDACT_CONFIG = [
    "token",
    "sn",
    "rruid",
    CONF_UNIQUE_ID,
    "username",
    "uid",
    "h",
    "k",
    "s",
    "u",
    "avatarurl",
    "nickname",
    "tuyaUuid",
    "extra",
]


async def async_get_config_entry_diagnostics(
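
The expanded TO_REDACT_CONFIG list is the set of keys whose values get masked in downloaded diagnostics. A self-contained sketch of what that kind of key-based redaction does, using a simplified recursive function and a made-up payload rather than Home Assistant's actual diagnostics helper:

TO_REDACT = {"token", "sn", "rruid", "username", "uid", "nickname"}


def redact(data: dict) -> dict:
    # Replace values of sensitive keys with a placeholder, recursing into nested dicts.
    result = {}
    for key, value in data.items():
        if key in TO_REDACT:
            result[key] = "**REDACTED**"
        elif isinstance(value, dict):
            result[key] = redact(value)
        else:
            result[key] = value
    return result


sample = {"username": "user@example.com", "device": {"sn": "ABC123", "model": "S7"}}
print(redact(sample))
# {'username': '**REDACTED**', 'device': {'sn': '**REDACTED**', 'model': 'S7'}}
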
@@ -20,7 +20,7 @@
  "loggers": ["roborock"],
  "quality_scale": "silver",
  "requirements": [
    "python-roborock==3.19.0",
    "python-roborock==3.21.0",
    "vacuum-map-parser-roborock==0.1.4"
  ]
}

@@ -92,10 +92,8 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity)
            LOGGER.debug("Attempting to turn on %s via automation", self.entity_id)
            await self._turn_on_action.async_run(self.hass, self._context)
        elif self._mac:
            LOGGER.warning(
                "Attempting to turn on %s via Wake-On-Lan; if this does not work, "
                "please ensure that Wake-On-Lan is available for your device or use "
                "a turn_on automation",
            LOGGER.debug(
                "Attempting to turn on %s via Wake-On-Lan",
                self.entity_id,
            )
            await self.hass.async_add_executor_job(self._wake_on_lan)

@@ -12,6 +12,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
from .coordinator import LeilSaunaCoordinator

PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.CLIMATE,
    Platform.LIGHT,
    Platform.SENSOR,

homeassistant/components/saunum/binary_sensor.py (new file, 120 lines)
@@ -0,0 +1,120 @@
"""Binary sensor platform for Saunum Leil Sauna Control Unit integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING

from pysaunum import SaunumData

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntity,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import LeilSaunaConfigEntry
from .entity import LeilSaunaEntity

if TYPE_CHECKING:
    from .coordinator import LeilSaunaCoordinator

PARALLEL_UPDATES = 0


@dataclass(frozen=True, kw_only=True)
class LeilSaunaBinarySensorEntityDescription(BinarySensorEntityDescription):
    """Describes Leil Sauna binary sensor entity."""

    value_fn: Callable[[SaunumData], bool | None]


BINARY_SENSORS: tuple[LeilSaunaBinarySensorEntityDescription, ...] = (
    LeilSaunaBinarySensorEntityDescription(
        key="door_open",
        device_class=BinarySensorDeviceClass.DOOR,
        value_fn=lambda data: data.door_open,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_door_open",
        translation_key="alarm_door_open",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_door_open,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_door_sensor",
        translation_key="alarm_door_sensor",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_door_sensor,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_thermal_cutoff",
        translation_key="alarm_thermal_cutoff",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_thermal_cutoff,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_internal_temp",
        translation_key="alarm_internal_temp",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_internal_temp,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_temp_sensor_short",
        translation_key="alarm_temp_sensor_short",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_temp_sensor_short,
    ),
    LeilSaunaBinarySensorEntityDescription(
        key="alarm_temp_sensor_open",
        translation_key="alarm_temp_sensor_open",
        device_class=BinarySensorDeviceClass.PROBLEM,
        entity_category=EntityCategory.DIAGNOSTIC,
        value_fn=lambda data: data.alarm_temp_sensor_open,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: LeilSaunaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Saunum Leil Sauna binary sensors from a config entry."""
    coordinator = entry.runtime_data

    async_add_entities(
        LeilSaunaBinarySensorEntity(coordinator, description)
        for description in BINARY_SENSORS
        if description.value_fn(coordinator.data) is not None
    )


class LeilSaunaBinarySensorEntity(LeilSaunaEntity, BinarySensorEntity):
    """Representation of a Saunum Leil Sauna binary sensor."""

    entity_description: LeilSaunaBinarySensorEntityDescription

    def __init__(
        self,
        coordinator: LeilSaunaCoordinator,
        description: LeilSaunaBinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary sensor."""
        super().__init__(coordinator)
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}-{description.key}"
        self.entity_description = description

    @property
    def is_on(self) -> bool | None:
        """Return the state of the binary sensor."""
        return self.entity_description.value_fn(self.coordinator.data)
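
One detail worth noting in the setup above: a description only becomes an entity when its value_fn returns something other than None for the first coordinator snapshot, so alarms the control unit never reports simply do not get entities. A reduced sketch of that filtering, with a stand-in dataclass instead of pysaunum's SaunumData:

from collections.abc import Callable
from dataclasses import dataclass


@dataclass
class FakeSaunumData:
    # Stand-in for pysaunum.SaunumData; attribute names mirror the descriptions above.
    door_open: bool | None
    alarm_thermal_cutoff: bool | None


DESCRIPTIONS: dict[str, Callable[[FakeSaunumData], bool | None]] = {
    "door_open": lambda data: data.door_open,
    "alarm_thermal_cutoff": lambda data: data.alarm_thermal_cutoff,
}

snapshot = FakeSaunumData(door_open=False, alarm_thermal_cutoff=None)

# Only keys whose value_fn yields a real value get entities created.
created = [key for key, value_fn in DESCRIPTIONS.items() if value_fn(snapshot) is not None]
print(created)  # ['door_open']
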
@@ -100,6 +100,12 @@ class LeilSaunaClimate(LeilSaunaEntity, ClimateEntity):

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new HVAC mode."""
        if hvac_mode == HVACMode.HEAT and self.coordinator.data.door_open:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="door_open",
            )

        try:
            if hvac_mode == HVACMode.HEAT:
                await self.coordinator.client.async_start_session()

@@ -30,6 +30,26 @@
    }
  },
  "entity": {
    "binary_sensor": {
      "alarm_door_open": {
        "name": "Door open during heating alarm"
      },
      "alarm_door_sensor": {
        "name": "Door open too long alarm"
      },
      "alarm_internal_temp": {
        "name": "Internal temperature alarm"
      },
      "alarm_temp_sensor_open": {
        "name": "Temperature sensor disconnected alarm"
      },
      "alarm_temp_sensor_short": {
        "name": "Temperature sensor shorted alarm"
      },
      "alarm_thermal_cutoff": {
        "name": "Thermal cutoff alarm"
      }
    },
    "light": {
      "light": {
        "name": "[%key:component::light::title%]"
@@ -49,6 +69,9 @@
    "communication_error": {
      "message": "Communication error: {error}"
    },
    "door_open": {
      "message": "Cannot start sauna session when sauna door is open"
    },
    "session_not_active": {
      "message": "Cannot change fan mode when sauna session is not active"
    },

@@ -20,5 +20,10 @@
    "turn_on": {
      "service": "mdi:power"
    }
  },
  "triggers": {
    "activated": {
      "trigger": "mdi:palette"
    }
  }
}

@@ -59,5 +59,11 @@
      "name": "Activate"
    }
  },
  "title": "Scene"
  "title": "Scene",
  "triggers": {
    "activated": {
      "description": "Triggers when a scene was activated",
      "name": "Scene activated"
    }
  }
}

homeassistant/components/scene/trigger.py (new file, 42 lines)
@@ -0,0 +1,42 @@
"""Provides triggers for scenes."""

from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA,
    EntityTriggerBase,
    Trigger,
)

from . import DOMAIN


class SceneActivatedTrigger(EntityTriggerBase):
    """Trigger for scene entity activations."""

    _domain = DOMAIN
    _schema = ENTITY_STATE_TRIGGER_SCHEMA

    def is_valid_transition(self, from_state: State, to_state: State) -> bool:
        """Check if the origin state is valid and different from the current state."""

        # UNKNOWN is a valid from_state, otherwise the first time the scene is activated
        # it would not trigger
        if from_state.state == STATE_UNAVAILABLE:
            return False

        return from_state.state != to_state.state

    def is_valid_state(self, state: State) -> bool:
        """Check if the new state is not invalid."""
        return state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN)


TRIGGERS: dict[str, type[Trigger]] = {
    "activated": SceneActivatedTrigger,
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for scenes."""
    return TRIGGERS

homeassistant/components/scene/triggers.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
activated:
  target:
    entity:
      domain: scene
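
The transition rules in SceneActivatedTrigger come down to three checks: the new state must not be unknown or unavailable, the origin state must not be unavailable (unknown is allowed so the very first activation still fires), and the state value must actually change. A standalone sketch of just that decision logic, using bare state strings instead of Home Assistant State objects:

STATE_UNAVAILABLE = "unavailable"
STATE_UNKNOWN = "unknown"


def scene_activation_fires(from_state: str, to_state: str) -> bool:
    # Mirrors is_valid_state/is_valid_transition above, on plain strings.
    if to_state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
        return False
    if from_state == STATE_UNAVAILABLE:
        return False
    return from_state != to_state


# A scene entity's state is the timestamp of its last activation.
print(scene_activation_fires(STATE_UNKNOWN, "2025-01-01T00:00:00+00:00"))      # True
print(scene_activation_fires(STATE_UNAVAILABLE, "2025-01-01T00:00:00+00:00"))  # False
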
@@ -8,5 +8,5 @@
  "iot_class": "cloud_push",
  "loggers": ["pysmarlaapi", "pysignalr"],
  "quality_scale": "bronze",
  "requirements": ["pysmarlaapi==0.9.2"]
  "requirements": ["pysmarlaapi==0.9.3"]
}

@@ -7,6 +7,7 @@ from datetime import timedelta
import logging
from urllib.parse import ParseResult, urlparse

from aiohttp import CookieJar
from solarlog_cli.solarlog_connector import SolarLogConnector
from solarlog_cli.solarlog_exceptions import (
    SolarLogAuthenticationError,
@@ -20,7 +21,7 @@ from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify

@@ -63,7 +64,9 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]):
            self.host,
            tz=hass.config.time_zone,
            password=password,
            session=async_get_clientsession(hass),
            session=async_create_clientsession(
                hass, cookie_jar=CookieJar(quote_cookie=False)
            ),
        )

    async def _async_setup(self) -> None:
@@ -5,6 +5,7 @@ from __future__ import annotations
import asyncio
import datetime
from functools import partial
from http import HTTPStatus
from ipaddress import AddressValueError, IPv4Address
import logging
import socket
@@ -12,7 +13,7 @@ from typing import Any, cast
from urllib.parse import urlparse

from aiohttp import ClientError
from requests.exceptions import Timeout
from requests.exceptions import HTTPError, Timeout
from soco import events_asyncio, zonegroupstate
import soco.config as soco_config
from soco.core import SoCo
@@ -54,6 +55,8 @@ from .const import (
    SUB_FAIL_ISSUE_ID,
    SUB_FAIL_URL,
    SUBSCRIPTION_TIMEOUT,
    UPNP_DOCUMENTATION_URL,
    UPNP_ISSUE_ID,
    UPNP_ST,
)
from .exception import SonosUpdateError
@@ -184,6 +187,32 @@ class SonosDiscoveryManager:
        """Check if device at provided IP is known to be invisible."""
        return any(x for x in self._known_invisible if x.ip_address == ip_address)

    async def _process_http_connection_error(
        self, err: HTTPError, ip_address: str
    ) -> None:
        """Process HTTP Errors when connecting to a Sonos speaker."""
        response = err.response
        # When UPnP is disabled, Sonos returns HTTP 403 Forbidden error.
        # Create issue advising user to enable UPnP on Sonos system.
        if response is not None and response.status_code == HTTPStatus.FORBIDDEN:
            ir.async_create_issue(
                self.hass,
                DOMAIN,
                f"{UPNP_ISSUE_ID}_{ip_address}",
                is_fixable=False,
                severity=ir.IssueSeverity.ERROR,
                translation_key="upnp_disabled",
                translation_placeholders={
                    "device_ip": ip_address,
                    "documentation_url": UPNP_DOCUMENTATION_URL,
                },
            )
        _LOGGER.error(
            "HTTP error connecting to Sonos speaker at %s: %s",
            ip_address,
            err,
        )

    async def async_subscribe_to_zone_updates(self, ip_address: str) -> None:
        """Test subscriptions and create SonosSpeakers based on results."""
        try:
@@ -195,13 +224,29 @@ class SonosDiscoveryManager:
            )
            return
        soco = SoCo(ip_address)
        # Cache now to avoid household ID lookup during first ZoneGroupState processing
        await self.hass.async_add_executor_job(
            getattr,
            soco,
            "household_id",
        )
        sub = await soco.zoneGroupTopology.subscribe()
        try:
            # Cache now to avoid household ID lookup during first ZoneGroupState processing
            await self.hass.async_add_executor_job(
                getattr,
                soco,
                "household_id",
            )
            sub = await soco.zoneGroupTopology.subscribe()
        except HTTPError as err:
            await self._process_http_connection_error(err, ip_address)
            return
        except (
            OSError,
            SoCoException,
            Timeout,
            TimeoutError,
        ) as err:
            _LOGGER.error(
                "Error connecting to discovered Sonos speaker at %s: %s",
                ip_address,
                err,
            )
            return

        @callback
        def _async_add_visible_zones(subscription_succeeded: bool = False) -> None:
@@ -390,6 +435,9 @@ class SonosDiscoveryManager:
                    sync_get_visible_zones,
                    soco,
                )
            except HTTPError as err:
                await self._process_http_connection_error(err, ip_addr)
                continue
            except (
                OSError,
                SoCoException,
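
The new error path treats an HTTP 403 from the speaker as "UPnP disabled" and raises a repair issue instead of a plain connection error. A minimal standalone sketch of just that classification step, using requests' real exception type but a hypothetical helper name:

from http import HTTPStatus

import requests


def classify_sonos_http_error(err: requests.exceptions.HTTPError) -> str:
    # Hypothetical helper: 403 Forbidden is taken to mean UPnP is disabled on the
    # speaker; anything else is reported as a generic HTTP failure.
    response = err.response
    if response is not None and response.status_code == HTTPStatus.FORBIDDEN:
        return "upnp_disabled"
    return "http_error"


fake_response = requests.models.Response()
fake_response.status_code = 403
print(classify_sonos_http_error(requests.exceptions.HTTPError(response=fake_response)))
# upnp_disabled
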
Some files were not shown because too many files have changed in this diff.