Compare commits

..

1 Commit

Author: abmantis
SHA1: 32d82b610c
Message: Add calendar event_started/event_ended triggers
Date: 2025-12-23 21:43:29 +00:00
184 changed files with 3273 additions and 6450 deletions

1
.gitignore vendored
View File

@@ -92,7 +92,6 @@ pip-selfcheck.json
venv
.venv
Pipfile*
uv.lock
share/*
/Scripts/

6
CODEOWNERS generated
View File

@@ -530,8 +530,6 @@ build.json @home-assistant/supervisor
/tests/components/flo/ @dmulcahey
/homeassistant/components/flume/ @ChrisMandich @bdraco @jeeftor
/tests/components/flume/ @ChrisMandich @bdraco @jeeftor
/homeassistant/components/fluss/ @fluss
/tests/components/fluss/ @fluss
/homeassistant/components/flux_led/ @icemanch
/tests/components/flux_led/ @icemanch
/homeassistant/components/forecast_solar/ @klaasnicolaas @frenck
@@ -1697,8 +1695,8 @@ build.json @home-assistant/supervisor
/tests/components/trafikverket_train/ @gjohansson-ST
/homeassistant/components/trafikverket_weatherstation/ @gjohansson-ST
/tests/components/trafikverket_weatherstation/ @gjohansson-ST
/homeassistant/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/tests/components/transmission/ @engrbm87 @JPHutchins @andrew-codechimp
/homeassistant/components/transmission/ @engrbm87 @JPHutchins
/tests/components/transmission/ @engrbm87 @JPHutchins
/homeassistant/components/trend/ @jpbede
/tests/components/trend/ @jpbede
/homeassistant/components/triggercmd/ @rvmey

View File

@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
from .coordinator import AirobotConfigEntry, AirobotDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.NUMBER, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: AirobotConfigEntry) -> bool:

View File

@@ -1,9 +0,0 @@
{
"entity": {
"number": {
"hysteresis_band": {
"default": "mdi:delta"
}
}
}
}

View File

@@ -1,99 +0,0 @@
"""Number platform for Airobot thermostat."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from pyairobotrest.const import HYSTERESIS_BAND_MAX, HYSTERESIS_BAND_MIN
from pyairobotrest.exceptions import AirobotError
from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
)
from homeassistant.const import EntityCategory, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AirobotConfigEntry
from .const import DOMAIN
from .coordinator import AirobotDataUpdateCoordinator
from .entity import AirobotEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirobotNumberEntityDescription(NumberEntityDescription):
"""Describes Airobot number entity."""
value_fn: Callable[[AirobotDataUpdateCoordinator], float]
set_value_fn: Callable[[AirobotDataUpdateCoordinator, float], Awaitable[None]]
NUMBERS: tuple[AirobotNumberEntityDescription, ...] = (
AirobotNumberEntityDescription(
key="hysteresis_band",
translation_key="hysteresis_band",
device_class=NumberDeviceClass.TEMPERATURE,
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
native_min_value=HYSTERESIS_BAND_MIN / 10.0,
native_max_value=HYSTERESIS_BAND_MAX / 10.0,
native_step=0.1,
value_fn=lambda coordinator: coordinator.data.settings.hysteresis_band,
set_value_fn=lambda coordinator, value: coordinator.client.set_hysteresis_band(
value
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirobotConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Airobot number platform."""
coordinator = entry.runtime_data
async_add_entities(
AirobotNumber(coordinator, description) for description in NUMBERS
)
class AirobotNumber(AirobotEntity, NumberEntity):
"""Representation of an Airobot number entity."""
entity_description: AirobotNumberEntityDescription
def __init__(
self,
coordinator: AirobotDataUpdateCoordinator,
description: AirobotNumberEntityDescription,
) -> None:
"""Initialize the number entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
@property
def native_value(self) -> float:
"""Return the current value."""
return self.entity_description.value_fn(self.coordinator)
async def async_set_native_value(self, value: float) -> None:
"""Set the value."""
try:
await self.entity_description.set_value_fn(self.coordinator, value)
except AirobotError as err:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="set_value_failed",
translation_placeholders={"error": str(err)},
) from err
else:
await self.coordinator.async_request_refresh()

View File

@@ -48,7 +48,7 @@ rules:
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: Single device integration, no dynamic device discovery needed.
@@ -57,7 +57,7 @@ rules:
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt

View File

@@ -44,11 +44,6 @@
}
},
"entity": {
"number": {
"hysteresis_band": {
"name": "Hysteresis band"
}
},
"sensor": {
"air_temperature": {
"name": "Air temperature"
@@ -79,9 +74,6 @@
},
"set_temperature_failed": {
"message": "Failed to set temperature to {temperature}."
},
"set_value_failed": {
"message": "Failed to set value: {error}"
}
}
}

View File

@@ -3,7 +3,7 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from typing import Any
from aiohttp import CookieJar
from pyanglianwater import AnglianWater
@@ -30,11 +30,14 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
)
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
async def validate_credentials(
auth: MSOB2CAuth, account_number: str
) -> str | MSOB2CAuth:
"""Validate the provided credentials."""
try:
await auth.send_login_request()
@@ -43,33 +46,6 @@ async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
except Exception:
_LOGGER.exception("Unexpected exception")
return "unknown"
return auth
def humanize_account_data(account: dict) -> str:
"""Convert an account data into a human-readable format."""
if account["address"]["company_name"] != "":
return f"{account['account_number']} - {account['address']['company_name']}"
if account["address"]["building_name"] != "":
return f"{account['account_number']} - {account['address']['building_name']}"
return f"{account['account_number']} - {account['address']['postcode']}"
async def get_accounts(auth: MSOB2CAuth) -> list[selector.SelectOptionDict]:
"""Retrieve the list of accounts associated with the authenticated user."""
_aw = AnglianWater(authenticator=auth)
accounts = await _aw.api.get_associated_accounts()
return [
selector.SelectOptionDict(
value=str(account["account_number"]),
label=humanize_account_data(account),
)
for account in accounts["result"]["active"]
]
async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2CAuth:
"""Validate the provided account number."""
_aw = AnglianWater(authenticator=auth)
try:
await _aw.validate_smart_meter(account_number)
@@ -81,91 +57,36 @@ async def validate_account(auth: MSOB2CAuth, account_number: str) -> str | MSOB2
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anglian Water."""
def __init__(self) -> None:
"""Initialize the config flow."""
self.authenticator: MSOB2CAuth | None = None
self.accounts: list[selector.SelectOptionDict] = []
self.user_input: dict[str, Any] | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self.authenticator = MSOB2CAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_create_clientsession(
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
validation_response = await validate_credentials(
MSOB2CAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_create_clientsession(
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
),
user_input[CONF_ACCOUNT_NUMBER],
)
validation_response = await validate_credentials(self.authenticator)
if isinstance(validation_response, str):
errors["base"] = validation_response
else:
self.accounts = await get_accounts(self.authenticator)
if len(self.accounts) > 1:
self.user_input = user_input
return await self.async_step_select_account()
account_number = self.accounts[0]["value"]
self.user_input = user_input
return await self.async_step_complete(
{
CONF_ACCOUNT_NUMBER: account_number,
}
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_ACCOUNT_NUMBER],
data={
**user_input,
CONF_ACCESS_TOKEN: validation_response.refresh_token,
},
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_select_account(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the account selection step."""
errors = {}
if user_input is not None:
if TYPE_CHECKING:
assert self.authenticator
validation_result = await validate_account(
self.authenticator,
user_input[CONF_ACCOUNT_NUMBER],
)
if isinstance(validation_result, str):
errors["base"] = validation_result
else:
return await self.async_step_complete(user_input)
return self.async_show_form(
step_id="select_account",
data_schema=vol.Schema(
{
vol.Required(CONF_ACCOUNT_NUMBER): selector.SelectSelector(
selector.SelectSelectorConfig(
options=self.accounts,
multiple=False,
mode=selector.SelectSelectorMode.DROPDOWN,
)
)
}
),
errors=errors,
)
async def async_step_complete(self, user_input: dict[str, Any]) -> ConfigFlowResult:
"""Handle the final configuration step."""
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
self._abort_if_unique_id_configured()
if TYPE_CHECKING:
assert self.authenticator
assert self.user_input
config_entry_data = {
**self.user_input,
CONF_ACCOUNT_NUMBER: user_input[CONF_ACCOUNT_NUMBER],
CONF_ACCESS_TOKEN: self.authenticator.refresh_token,
}
return self.async_create_entry(
title=user_input[CONF_ACCOUNT_NUMBER],
data=config_entry_data,
)

View File

@@ -10,21 +10,14 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"select_account": {
"data": {
"account_number": "Billing account number"
},
"data_description": {
"account_number": "Select the billing account you wish to use."
},
"description": "Multiple active billing accounts were found with your credentials. Please select the account you wish to use. If this is unexpected, contact Anglian Water to confirm your active accounts."
},
"user": {
"data": {
"account_number": "Billing Account Number",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"account_number": "Your account number found on your latest bill.",
"password": "Your password",
"username": "Username or email used to log in to the Anglian Water website."
},

View File

@@ -29,7 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"requirements": ["axis==66"],
"requirements": ["axis==65"],
"ssdp": [
{
"manufacturer": "AXIS"

View File

@@ -25,7 +25,6 @@ from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
entity_platform,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.helpers.device_registry import (
@@ -43,12 +42,7 @@ from homeassistant.util import dt as dt_util, slugify
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
from .coordinator import BluesoundCoordinator
from .utils import (
dispatcher_join_signal,
dispatcher_unjoin_signal,
format_unique_id,
id_to_paired_player,
)
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id
if TYPE_CHECKING:
from . import BluesoundConfigEntry
@@ -89,11 +83,9 @@ async def async_setup_entry(
SERVICE_CLEAR_TIMER, None, "async_clear_timer"
)
platform.async_register_entity_service(
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_bluesound_join"
)
platform.async_register_entity_service(
SERVICE_UNJOIN, None, "async_bluesound_unjoin"
SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, "async_join"
)
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")
async_add_entities([bluesound_player], update_before_add=True)
@@ -128,7 +120,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self._presets: list[Preset] = coordinator.data.presets
self._group_name: str | None = None
self._group_list: list[str] = []
self._group_members: list[str] | None = None
self._bluesound_device_name = sync_status.name
self._player = player
self._last_status_update = dt_util.utcnow()
@@ -189,7 +180,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self._last_status_update = dt_util.utcnow()
self._group_list = self.rebuild_bluesound_group()
self._group_members = self.rebuild_group_members()
self.async_write_ha_state()
@@ -375,13 +365,11 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
MediaPlayerEntityFeature.VOLUME_STEP
| MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.GROUPING
)
supported = (
MediaPlayerEntityFeature.CLEAR_PLAYLIST
| MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.GROUPING
)
if not self._status.indexing:
@@ -433,57 +421,8 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
return shuffle
@property
def group_members(self) -> list[str] | None:
"""Get list of group members. Leader is always first."""
return self._group_members
async def async_join_players(self, group_members: list[str]) -> None:
"""Join `group_members` as a player group with the current player."""
if self.entity_id in group_members:
raise ServiceValidationError("Cannot join player to itself")
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
paired_players = []
for group_member in group_members:
sync_status = entity_ids_with_sync_status.get(group_member)
if sync_status is None:
continue
paired_player = id_to_paired_player(sync_status.id)
if paired_player:
paired_players.append(paired_player)
if paired_players:
await self._player.add_followers(paired_players)
async def async_unjoin_player(self) -> None:
"""Remove this player from any group."""
if self._sync_status.leader is not None:
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
async_dispatcher_send(
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
)
if self._sync_status.followers is not None:
await self._player.remove_follower(self.host, self.port)
async def async_bluesound_join(self, master: str) -> None:
async def async_join(self, master: str) -> None:
"""Join the player to a group."""
ir.async_create_issue(
self.hass,
DOMAIN,
f"deprecated_service_{SERVICE_JOIN}",
is_fixable=False,
breaks_in_ha_version="2026.7.0",
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_service_join",
translation_placeholders={
"name": slugify(self.sync_status.name),
},
)
if master == self.entity_id:
raise ServiceValidationError("Cannot join player to itself")
@@ -492,23 +431,17 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
self.hass, dispatcher_join_signal(master), self.host, self.port
)
async def async_bluesound_unjoin(self) -> None:
async def async_unjoin(self) -> None:
"""Unjoin the player from a group."""
ir.async_create_issue(
self.hass,
DOMAIN,
f"deprecated_service_{SERVICE_UNJOIN}",
is_fixable=False,
breaks_in_ha_version="2026.7.0",
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_service_unjoin",
translation_placeholders={
"name": slugify(self.sync_status.name),
},
)
if self._sync_status.leader is None:
return
await self.async_unjoin_player()
leader_id = f"{self._sync_status.leader.ip}:{self._sync_status.leader.port}"
_LOGGER.debug("Trying to unjoin player: %s", self.id)
async_dispatcher_send(
self.hass, dispatcher_unjoin_signal(leader_id), self.host, self.port
)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
@@ -555,63 +488,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
follower_names.insert(0, leader_sync_status.name)
return follower_names
def rebuild_group_members(self) -> list[str] | None:
"""Get list of group members. Leader is always first."""
if self.sync_status.leader is None and self.sync_status.followers is None:
return None
entity_ids_with_sync_status = self._entity_ids_with_sync_status()
leader_entity_id = None
followers = None
if self.sync_status.followers is not None:
leader_entity_id = self.entity_id
followers = self.sync_status.followers
elif self.sync_status.leader is not None:
leader_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
for entity_id, sync_status in entity_ids_with_sync_status.items():
if sync_status.id == leader_id:
leader_entity_id = entity_id
followers = sync_status.followers
break
if leader_entity_id is None or followers is None:
return None
grouped_entity_ids = [leader_entity_id]
for follower in followers:
follower_id = f"{follower.ip}:{follower.port}"
entity_ids = [
entity_id
for entity_id, sync_status in entity_ids_with_sync_status.items()
if sync_status.id == follower_id
]
match entity_ids:
case [entity_id]:
grouped_entity_ids.append(entity_id)
return grouped_entity_ids
def _entity_ids_with_sync_status(self) -> dict[str, SyncStatus]:
result = {}
entity_registry = er.async_get(self.hass)
config_entries: list[BluesoundConfigEntry] = (
self.hass.config_entries.async_entries(DOMAIN)
)
for config_entry in config_entries:
entity_entries = er.async_entries_for_config_entry(
entity_registry, config_entry.entry_id
)
for entity_entry in entity_entries:
if entity_entry.domain == "media_player":
result[entity_entry.entity_id] = (
config_entry.runtime_data.coordinator.data.sync_status
)
return result
async def async_add_follower(self, host: str, port: int) -> None:
"""Add follower to leader."""
await self._player.add_follower(host, port)
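The hunks above re-register the bluesound.join and bluesound.unjoin entity services (now handled by async_join/async_unjoin) and drop the deprecation repair issues. A minimal sketch of calling these services from an automation, assuming the schema shown above; both entity IDs are placeholders:

# Hypothetical action calls against the re-registered services
# (entity IDs are placeholders)
actions:
  - action: bluesound.join
    target:
      entity_id: media_player.bluesound_kitchen
    data:
      master: media_player.bluesound_living_room
  - action: bluesound.unjoin
    target:
      entity_id: media_player.bluesound_kitchen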

View File

@@ -41,17 +41,9 @@
"description": "Use `button.{name}_clear_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.clear_sleep_timer"
},
"deprecated_service_join": {
"description": "Use the `media_player.join` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.join"
},
"deprecated_service_set_sleep_timer": {
"description": "Use `button.{name}_set_sleep_timer` instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.set_sleep_timer"
},
"deprecated_service_unjoin": {
"description": "Use the `media_player.unjoin` action instead.\n\nPlease replace this action and adjust your automations and scripts.",
"title": "Detected use of deprecated action bluesound.unjoin"
}
},
"services": {

View File

@@ -1,7 +1,5 @@
"""Utility functions for the Bluesound component."""
from pyblu import PairedPlayer
from homeassistant.helpers.device_registry import format_mac
@@ -21,12 +19,3 @@ def dispatcher_unjoin_signal(leader_id: str) -> str:
Id is ip_address:port. This can be obtained from sync_status.id.
"""
return f"bluesound_unjoin_{leader_id}"
def id_to_paired_player(id: str) -> PairedPlayer | None:
"""Try to convert id in format 'ip:port' to PairedPlayer. Returns None if unable to do so."""
match id.rsplit(":", 1):
case [str() as ip, str() as port] if port.isdigit():
return PairedPlayer(ip, int(port))
case _:
return None

View File

@@ -15,5 +15,13 @@
"get_events": {
"service": "mdi:calendar-month"
}
},
"triggers": {
"event_ended": {
"trigger": "mdi:calendar-end"
},
"event_started": {
"trigger": "mdi:calendar-start"
}
}
}

View File

@@ -1,4 +1,10 @@
{
"common": {
"trigger_event_offset_description": "Offset from the event time.",
"trigger_event_offset_name": "Offset",
"trigger_event_offset_type_description": "Whether to trigger before or after the event time, if an offset is defined.",
"trigger_event_offset_type_name": "Offset type"
},
"entity_component": {
"_": {
"name": "[%key:component::calendar::title%]",
@@ -45,6 +51,14 @@
"title": "Detected use of deprecated action calendar.list_events"
}
},
"selector": {
"trigger_offset_type": {
"options": {
"after": "After",
"before": "Before"
}
}
},
"services": {
"create_event": {
"description": "Adds a new calendar event.",
@@ -103,5 +117,35 @@
"name": "Get events"
}
},
"title": "Calendar"
"title": "Calendar",
"triggers": {
"event_ended": {
"description": "Triggers when a calendar event ends.",
"fields": {
"offset": {
"description": "[%key:component::calendar::common::trigger_event_offset_description%]",
"name": "[%key:component::calendar::common::trigger_event_offset_name%]"
},
"offset_type": {
"description": "[%key:component::calendar::common::trigger_event_offset_type_description%]",
"name": "[%key:component::calendar::common::trigger_event_offset_type_name%]"
}
},
"name": "Calendar event ended"
},
"event_started": {
"description": "Triggers when a calendar event starts.",
"fields": {
"offset": {
"description": "[%key:component::calendar::common::trigger_event_offset_description%]",
"name": "[%key:component::calendar::common::trigger_event_offset_name%]"
},
"offset_type": {
"description": "[%key:component::calendar::common::trigger_event_offset_type_description%]",
"name": "[%key:component::calendar::common::trigger_event_offset_type_name%]"
}
},
"name": "Calendar event started"
}
}
}

View File

@@ -10,8 +10,14 @@ from typing import TYPE_CHECKING, Any, cast
import voluptuous as vol
from homeassistant.const import CONF_ENTITY_ID, CONF_EVENT, CONF_OFFSET, CONF_OPTIONS
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_EVENT,
CONF_OFFSET,
CONF_OPTIONS,
CONF_TARGET,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import move_top_level_schema_fields_to_options
@@ -20,12 +26,13 @@ from homeassistant.helpers.event import (
async_track_point_in_time,
async_track_time_interval,
)
from homeassistant.helpers.target import TargetEntityChangeTracker, TargetSelection
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from . import CalendarEntity, CalendarEvent
from .const import DATA_COMPONENT
from .const import DATA_COMPONENT, DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -33,19 +40,35 @@ EVENT_START = "start"
EVENT_END = "end"
UPDATE_INTERVAL = datetime.timedelta(minutes=15)
CONF_OFFSET_TYPE = "offset_type"
OFFSET_TYPE_BEFORE = "before"
OFFSET_TYPE_AFTER = "after"
_OPTIONS_SCHEMA_DICT = {
_SINGLE_ENTITY_OPTIONS_SCHEMA_DICT = {
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Optional(CONF_EVENT, default=EVENT_START): vol.In({EVENT_START, EVENT_END}),
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
}
_CONFIG_SCHEMA = vol.Schema(
_SINGLE_ENTITY_SCHEMA = vol.Schema(
{
vol.Required(CONF_OPTIONS): _OPTIONS_SCHEMA_DICT,
vol.Required(CONF_OPTIONS): _SINGLE_ENTITY_OPTIONS_SCHEMA_DICT,
},
)
_EVENT_TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_OPTIONS, default={}): {
vol.Optional(CONF_OFFSET, default=datetime.timedelta(0)): cv.time_period,
vol.Optional(CONF_OFFSET_TYPE, default=OFFSET_TYPE_BEFORE): vol.In(
{OFFSET_TYPE_BEFORE, OFFSET_TYPE_AFTER}
),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
# mypy: disallow-any-generics
@@ -110,15 +133,19 @@ def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity:
return entity
def event_fetcher(hass: HomeAssistant, entity_id: str) -> EventFetcher:
def event_fetcher(hass: HomeAssistant, entity_ids: set[str]) -> EventFetcher:
"""Build an async_get_events wrapper to fetch events during a time span."""
async def async_get_events(timespan: Timespan) -> list[CalendarEvent]:
"""Return events active in the specified time span."""
entity = get_entity(hass, entity_id)
# Expand by one second to make the end time exclusive
end_time = timespan.end + datetime.timedelta(seconds=1)
return await entity.async_get_events(hass, timespan.start, end_time)
events: list[CalendarEvent] = []
for entity_id in entity_ids:
entity = get_entity(hass, entity_id)
events.extend(await entity.async_get_events(hass, timespan.start, end_time))
return events
return async_get_events
@@ -260,8 +287,68 @@ class CalendarEventListener:
self._listen_next_calendar_event()
class EventTrigger(Trigger):
"""Calendar event trigger."""
class TargetCalendarEventListener(TargetEntityChangeTracker):
"""Helper class to listen to calendar events for target entity changes."""
def __init__(
self,
hass: HomeAssistant,
target_selection: TargetSelection,
event_type: str,
offset: datetime.timedelta,
run_action: TriggerActionRunner,
) -> None:
"""Initialize the state change tracker."""
def entity_filter(entities: set[str]) -> set[str]:
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
super().__init__(hass, target_selection, entity_filter)
self._event_type = event_type
self._offset = offset
self._run_action = run_action
self._trigger_data = {
"event": event_type,
"offset": offset,
}
self._calendar_event_listener: CalendarEventListener | None = None
def _handle_entities(self, tracked_entities: set[str]) -> None:
"""Handle the tracked entities."""
self._hass.async_create_task(self._start_listening(tracked_entities))
async def _start_listening(self, tracked_entities: set[str]) -> None:
"""Start listening for calendar events."""
_LOGGER.debug("Tracking events for calendars: %s", tracked_entities)
if self._calendar_event_listener:
self._calendar_event_listener.async_detach()
self._calendar_event_listener = CalendarEventListener(
self._hass,
self._run_action,
self._trigger_data,
queued_event_fetcher(
event_fetcher(self._hass, tracked_entities),
self._event_type,
self._offset,
),
)
await self._calendar_event_listener.async_attach()
def _unsubscribe(self) -> None:
"""Unsubscribe from all events."""
super()._unsubscribe()
if self._calendar_event_listener:
self._calendar_event_listener.async_detach()
self._calendar_event_listener = None
class SingleEntityEventTrigger(Trigger):
"""Legacy single calendar entity event trigger."""
_options: dict[str, Any]
@@ -271,7 +358,7 @@ class EventTrigger(Trigger):
) -> ConfigType:
"""Validate complete config."""
complete_config = move_top_level_schema_fields_to_options(
complete_config, _OPTIONS_SCHEMA_DICT
complete_config, _SINGLE_ENTITY_OPTIONS_SCHEMA_DICT
)
return await super().async_validate_complete_config(hass, complete_config)
@@ -280,7 +367,7 @@ class EventTrigger(Trigger):
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, _CONFIG_SCHEMA(config))
return cast(ConfigType, _SINGLE_ENTITY_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize trigger."""
@@ -311,15 +398,72 @@ class EventTrigger(Trigger):
run_action,
trigger_data,
queued_event_fetcher(
event_fetcher(self._hass, entity_id), event_type, offset
event_fetcher(self._hass, {entity_id}), event_type, offset
),
)
await listener.async_attach()
return listener.async_detach
class EventTrigger(Trigger):
"""Calendar event trigger."""
_options: dict[str, Any]
_event_type: str
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, _EVENT_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.target is not None
assert config.options is not None
self._target = config.target
self._options = config.options
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach a trigger."""
offset = self._options[CONF_OFFSET]
offset_type = self._options.get(CONF_OFFSET_TYPE, OFFSET_TYPE_BEFORE)
if offset_type == OFFSET_TYPE_BEFORE:
offset = -offset
target_selection = TargetSelection(self._target)
if not target_selection.has_any_target:
raise HomeAssistantError(f"No target defined in {self._target}")
listener = TargetCalendarEventListener(
self._hass, target_selection, self._event_type, offset, run_action
)
return listener.async_setup()
class EventStartedTrigger(EventTrigger):
"""Calendar event started trigger."""
_event_type = EVENT_START
class EventEndedTrigger(EventTrigger):
"""Calendar event ended trigger."""
_event_type = EVENT_END
TRIGGERS: dict[str, type[Trigger]] = {
"_": EventTrigger,
"_": SingleEntityEventTrigger,
"event_started": EventStartedTrigger,
"event_ended": EventEndedTrigger,
}
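For context, the new EventStartedTrigger/EventEndedTrigger classes accept a target (filtered to calendar-domain entities) instead of the single entity_id used by the legacy trigger. A hypothetical trigger block under that assumption; the area name and offset value are placeholders, and the exact placement of offset/offset_type in the final YAML schema may differ:

# Sketch of the new target-based calendar trigger (area_id is a placeholder)
triggers:
  - trigger: calendar.event_ended
    target:
      area_id: living_room
    offset:
      minutes: 5
    offset_type: after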

View File

@@ -0,0 +1,27 @@
.trigger_common: &trigger_common
target:
entity:
domain: calendar
fields:
offset:
required: true
default:
days: 0
hours: 0
minutes: 0
seconds: 0
selector:
duration:
enable_day: true
offset_type:
required: true
default: before
selector:
select:
translation_key: trigger_offset_type
options:
- before
- after
event_started: *trigger_common
event_ended: *trigger_common
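Putting the fields defined above together, a hypothetical automation using the new trigger could look like the sketch below; the calendar entity, offset value, and notification message are placeholders, and the exact YAML shape may differ from what the final schema accepts:

# Sketch only: assumes offset/offset_type are given as top-level trigger fields
- triggers:
    - trigger: calendar.event_started
      target:
        entity_id: calendar.family
      offset:
        minutes: 15
      offset_type: before
  actions:
    - action: persistent_notification.create
      data:
        message: "A calendar event starts in 15 minutes"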

View File

@@ -35,7 +35,7 @@
"cpu_overheating": "CPU overheating",
"none": "None",
"pellets": "Pellets",
"unknown": "Unknown alarm"
"unkownn": "Unknown alarm"
}
},
"convector_air_flow": {

View File

@@ -8,7 +8,8 @@ import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir
from .manager import async_replace_device
@@ -21,6 +22,13 @@ class ESPHomeRepair(RepairsFlow):
self._data = data
super().__init__()
@callback
def _async_get_placeholders(self) -> dict[str, str]:
issue_registry = ir.async_get(self.hass)
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
assert issue is not None
return issue.translation_placeholders or {}
class DeviceConflictRepair(ESPHomeRepair):
"""Handler for an issue fixing device conflict."""
@@ -50,6 +58,7 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_menu(
step_id="init",
menu_options=["migrate", "manual"],
description_placeholders=self._async_get_placeholders(),
)
async def async_step_migrate(
@@ -60,6 +69,7 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_form(
step_id="migrate",
data_schema=vol.Schema({}),
description_placeholders=self._async_get_placeholders(),
)
entry_id = self.entry_id
await async_replace_device(self.hass, entry_id, self.stored_mac, self.mac)
@@ -74,6 +84,7 @@ class DeviceConflictRepair(ESPHomeRepair):
return self.async_show_form(
step_id="manual",
data_schema=vol.Schema({}),
description_placeholders=self._async_get_placeholders(),
)
self.hass.config_entries.async_schedule_reload(self.entry_id)
return self.async_create_entry(data={})

View File

@@ -1,31 +0,0 @@
"""The Fluss+ integration."""
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from .coordinator import FlussDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.BUTTON]
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
async def async_setup_entry(
hass: HomeAssistant,
entry: FlussConfigEntry,
) -> bool:
"""Set up Fluss+ from a config entry."""
coordinator = FlussDataUpdateCoordinator(hass, entry, entry.data[CONF_API_KEY])
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: FlussConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,40 +0,0 @@
"""Support for Fluss Devices."""
from homeassistant.components.button import ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FlussApiClientError, FlussDataUpdateCoordinator
from .entity import FlussEntity
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
async def async_setup_entry(
hass: HomeAssistant,
entry: FlussConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Fluss Devices, filtering out any invalid payloads."""
coordinator = entry.runtime_data
devices = coordinator.data
async_add_entities(
FlussButton(coordinator, device_id, device)
for device_id, device in devices.items()
)
class FlussButton(FlussEntity, ButtonEntity):
"""Representation of a Fluss button device."""
_attr_name = None
async def async_press(self) -> None:
"""Handle the button press."""
try:
await self.coordinator.api.async_trigger_device(self.device_id)
except FlussApiClientError as err:
raise HomeAssistantError(f"Failed to trigger device: {err}") from err

View File

@@ -1,55 +0,0 @@
"""Config flow for Fluss+ integration."""
from __future__ import annotations
from typing import Any
from fluss_api import (
FlussApiClient,
FlussApiClientAuthenticationError,
FlussApiClientCommunicationError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, LOGGER
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): cv.string})
class FlussConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Fluss+."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
api_key = user_input[CONF_API_KEY]
self._async_abort_entries_match({CONF_API_KEY: api_key})
client = FlussApiClient(
user_input[CONF_API_KEY], session=async_get_clientsession(self.hass)
)
try:
await client.async_get_devices()
except FlussApiClientCommunicationError:
errors["base"] = "cannot_connect"
except FlussApiClientAuthenticationError:
errors["base"] = "invalid_auth"
except Exception: # noqa: BLE001
LOGGER.exception("Unexpected exception occurred")
errors["base"] = "unknown"
if not errors:
return self.async_create_entry(
title="My Fluss+ Devices", data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

View File

@@ -1,9 +0,0 @@
"""Constants for the Fluss+ integration."""
from datetime import timedelta
import logging
DOMAIN = "fluss"
LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = 60 # seconds
UPDATE_INTERVAL_TIMEDELTA = timedelta(seconds=UPDATE_INTERVAL)

View File

@@ -1,50 +0,0 @@
"""DataUpdateCoordinator for Fluss+ integration."""
from __future__ import annotations
from typing import Any
from fluss_api import (
FlussApiClient,
FlussApiClientAuthenticationError,
FlussApiClientError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify
from .const import LOGGER, UPDATE_INTERVAL_TIMEDELTA
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Manages fetching Fluss device data on a schedule."""
def __init__(
self, hass: HomeAssistant, config_entry: FlussConfigEntry, api_key: str
) -> None:
"""Initialize the coordinator."""
self.api = FlussApiClient(api_key, session=async_get_clientsession(hass))
super().__init__(
hass,
LOGGER,
name=f"Fluss+ ({slugify(api_key[:8])})",
config_entry=config_entry,
update_interval=UPDATE_INTERVAL_TIMEDELTA,
)
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
"""Fetch data from the Fluss API and return as a dictionary keyed by deviceId."""
try:
devices = await self.api.async_get_devices()
except FlussApiClientAuthenticationError as err:
raise ConfigEntryError(f"Authentication failed: {err}") from err
except FlussApiClientError as err:
raise UpdateFailed(f"Error fetching Fluss devices: {err}") from err
return {device["deviceId"]: device for device in devices.get("devices", [])}

View File

@@ -1,39 +0,0 @@
"""Base entities for the Fluss+ integration."""
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .coordinator import FlussDataUpdateCoordinator
class FlussEntity(CoordinatorEntity[FlussDataUpdateCoordinator]):
"""Base class for Fluss entities."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: FlussDataUpdateCoordinator,
device_id: str,
device: dict,
) -> None:
"""Initialize the entity with a device ID and device data."""
super().__init__(coordinator)
self.device_id = device_id
self._attr_unique_id = device_id
self._attr_device_info = DeviceInfo(
identifiers={("fluss", device_id)},
name=device.get("deviceName"),
manufacturer="Fluss",
model="Fluss+ Device",
)
@property
def available(self) -> bool:
"""Return if the device is available."""
return super().available and self.device_id in self.coordinator.data
@property
def device(self) -> dict:
"""Return the stored device data."""
return self.coordinator.data[self.device_id]

View File

@@ -1,11 +0,0 @@
{
"domain": "fluss",
"name": "Fluss+",
"codeowners": ["@fluss"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/fluss",
"iot_class": "cloud_polling",
"loggers": ["fluss-api"],
"quality_scale": "bronze",
"requirements": ["fluss-api==0.1.9.20"]
}

View File

@@ -1,69 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
No actions present
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: todo
test-coverage: todo
# Gold
entity-translations: done
entity-device-class: done
devices: done
entity-category: done
entity-disabled-by-default:
status: exempt
comment: |
Not needed
discovery: todo
stale-devices: todo
diagnostics: todo
exception-translations: todo
icon-translations:
status: exempt
comment: |
No icons used
reconfiguration-flow: todo
dynamic-devices: todo
discovery-update-info: todo
repair-issues:
status: exempt
comment: |
No issues to repair
docs-use-cases: done
docs-supported-devices: todo
docs-supported-functions: done
docs-data-update: todo
docs-known-limitations: done
docs-troubleshooting: todo
docs-examples: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,23 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "The API key found in the profile page of the Fluss+ app."
},
"description": "Your Fluss API key, available in the profile page of the Fluss+ app"
}
}
}
}

View File

@@ -25,7 +25,7 @@ from homeassistant.const import (
EVENT_PANELS_UPDATED,
EVENT_THEMES_UPDATED,
)
from homeassistant.core import HomeAssistant, ServiceCall, async_get_hass, callback
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.icon import async_get_icons
@@ -41,7 +41,6 @@ from .storage import async_setup_frontend_storage
_LOGGER = logging.getLogger(__name__)
DOMAIN = "frontend"
CONF_NAME_DARK = "name_dark"
CONF_THEMES = "themes"
CONF_THEMES_MODES = "modes"
CONF_THEMES_LIGHT = "light"
@@ -527,16 +526,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
def _validate_selected_theme(theme: str) -> str:
"""Validate that a user selected theme is a valid theme."""
if theme in (DEFAULT_THEME, VALUE_NO_THEME):
return theme
hass = async_get_hass()
if theme not in hass.data[DATA_THEMES]:
raise vol.Invalid(f"Theme {theme} not found")
return theme
async def _async_setup_themes(
hass: HomeAssistant, themes: dict[str, Any] | None
) -> None:
@@ -580,32 +569,27 @@ async def _async_setup_themes(
@callback
def set_theme(call: ServiceCall) -> None:
"""Set backend-preferred theme."""
name = call.data[CONF_NAME]
mode = call.data.get("mode", "light")
def _update_hass_theme(theme: str, light: bool) -> None:
theme_key = DATA_DEFAULT_THEME if light else DATA_DEFAULT_DARK_THEME
if theme == VALUE_NO_THEME:
to_set = DEFAULT_THEME if light else None
else:
_LOGGER.info(
"Theme %s set as default %s theme",
theme,
"light" if light else "dark",
)
to_set = theme
hass.data[theme_key] = to_set
if (
name not in (DEFAULT_THEME, VALUE_NO_THEME)
and name not in hass.data[DATA_THEMES]
):
_LOGGER.warning("Theme %s not found", name)
return
name = call.data.get(CONF_NAME)
if name is not None and CONF_MODE in call.data:
mode = call.data.get("mode", "light")
light_mode = mode == "light"
_update_hass_theme(name, light_mode)
light_mode = mode == "light"
theme_key = DATA_DEFAULT_THEME if light_mode else DATA_DEFAULT_DARK_THEME
if name == VALUE_NO_THEME:
to_set = DEFAULT_THEME if light_mode else None
else:
name_dark = call.data.get(CONF_NAME_DARK)
if name:
_update_hass_theme(name, True)
if name_dark:
_update_hass_theme(name_dark, False)
_LOGGER.info("Theme %s set as default %s theme", name, mode)
to_set = name
hass.data[theme_key] = to_set
store.async_delay_save(
lambda: {
DATA_DEFAULT_THEME: hass.data[DATA_DEFAULT_THEME],
@@ -640,13 +624,11 @@ async def _async_setup_themes(
DOMAIN,
SERVICE_SET_THEME,
set_theme,
vol.All(
vol.Schema(
{
vol.Optional(CONF_NAME): _validate_selected_theme,
vol.Exclusive(CONF_NAME_DARK, "dark_modes"): _validate_selected_theme,
vol.Exclusive(CONF_MODE, "dark_modes"): vol.Any("dark", "light"),
},
cv.has_at_least_one_key(CONF_NAME, CONF_NAME_DARK),
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_MODE): vol.Any("dark", "light"),
}
),
)
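Under the restored schema above, name is required again and mode optionally selects whether the theme becomes the default light or dark theme. A minimal sketch of a matching service call; the theme name is a placeholder:

# Hypothetical call; "nordic" stands in for an installed theme name
action: frontend.set_theme
data:
  name: nordic
  mode: dark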

View File

@@ -3,15 +3,17 @@
set_theme:
fields:
name:
required: false
required: true
example: "default"
selector:
theme:
include_default: true
name_dark:
required: false
example: "default"
mode:
default: "light"
selector:
theme:
include_default: true
select:
options:
- "dark"
- "light"
translation_key: mode
reload_themes:

View File

@@ -7,24 +7,32 @@
"name": "Winter mode"
}
},
"selector": {
"mode": {
"options": {
"dark": "Dark",
"light": "Light"
}
}
},
"services": {
"reload_themes": {
"description": "Reloads themes from the YAML-configuration.",
"name": "Reload themes"
},
"set_theme": {
"description": "Sets the theme Home Assistant uses. Can be overridden by a user.",
"description": "Sets the default theme Home Assistant uses. Can be overridden by a user.",
"fields": {
"name": {
"description": "Name of the theme that is used by default.",
"name": "Theme"
"mode": {
"description": "Theme mode.",
"name": "Mode"
},
"name_dark": {
"description": "Alternative dark-mode theme that is used by default.",
"name": "Dark theme override"
"name": {
"description": "Name of a theme.",
"name": "Theme"
}
},
"name": "Set theme"
"name": "Set the default theme"
}
}
}

View File

@@ -44,16 +44,11 @@ class HomeWizardBatteryModeSelectEntity(HomeWizardEntity, SelectEntity):
"""Initialize the switch."""
super().__init__(coordinator)
batteries = coordinator.data.batteries
battery_count = batteries.battery_count if batteries is not None else None
entity_registry_enabled_default = (
battery_count is not None and battery_count > 0
)
description = SelectEntityDescription(
key="battery_group_mode",
translation_key="battery_group_mode",
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=entity_registry_enabled_default,
entity_registry_enabled_default=False,
options=[
str(mode)
for mode in (coordinator.data.device.supported_battery_modes() or [])

View File

@@ -198,7 +198,6 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"dlbandwidth": HuaweiSensorEntityDescription(
key="dlbandwidth",
translation_key="downlink_bandwidth",
# https://en.wikipedia.org/wiki/LTE_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((8, 15), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
@@ -217,7 +216,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="ecio",
translation_key="ecio",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/EC/IO
# https://wiki.teltonika.lt/view/EC/IO
icon_fn=lambda x: signal_icon((-20, -10, -6), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
@@ -292,8 +291,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"nrdlbandwidth": HuaweiSensorEntityDescription(
key="nrdlbandwidth",
translation_key="nrdlbandwidth",
# https://en.wikipedia.org/wiki/5G_NR_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((33, 66), x),
# Could add icon_fn like we have for dlbandwidth,
# if we find a good source what to use as 5G thresholds.
entity_category=EntityCategory.DIAGNOSTIC,
),
"nrdlmcs": HuaweiSensorEntityDescription(
@@ -315,8 +314,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrrsrp",
translation_key="nrrsrp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-100, -90, -80), x),
# Could add icon_fn as in rsrp, source for 5G thresholds?
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -325,8 +323,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrrsrq",
translation_key="nrrsrq",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-20, -15, -10), x),
# Could add icon_fn as in rsrq, source for 5G thresholds?
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -335,8 +332,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="nrsinr",
translation_key="nrsinr",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/SINR
icon_fn=lambda x: signal_icon((0, 13, 20), x),
# Could add icon_fn as in sinr, source for thresholds?
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -358,8 +354,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"nrulbandwidth": HuaweiSensorEntityDescription(
key="nrulbandwidth",
translation_key="nrulbandwidth",
# https://en.wikipedia.org/wiki/5G_NR_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((33, 66), x),
# Could add icon_fn as in ulbandwidth, source for 5G thresholds?
entity_category=EntityCategory.DIAGNOSTIC,
),
"nrulmcs": HuaweiSensorEntityDescription(
@@ -391,7 +386,7 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rscp",
translation_key="rscp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSCP
# https://wiki.teltonika.lt/view/RSCP
icon_fn=lambda x: signal_icon((-95, -85, -75), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
@@ -400,8 +395,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rsrp",
translation_key="rsrp",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-100, -90, -80), x),
# http://www.lte-anbieter.info/technik/rsrp.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((-110, -95, -80), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -410,8 +405,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rsrq",
translation_key="rsrq",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSRP_and_RSRQ
icon_fn=lambda x: signal_icon((-20, -15, -10), x),
# http://www.lte-anbieter.info/technik/rsrq.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((-11, -8, -5), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -420,8 +415,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="rssi",
translation_key="rssi",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/RSSI
icon_fn=lambda x: signal_icon((-95, -85, -75), x),
# https://eyesaas.com/wi-fi-signal-strength/
icon_fn=lambda x: signal_icon((-80, -70, -60), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -441,8 +436,8 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
key="sinr",
translation_key="sinr",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
# https://wiki.teltonika-networks.com/view/SINR
icon_fn=lambda x: signal_icon((0, 13, 20), x),
# http://www.lte-anbieter.info/technik/sinr.php # codespell:ignore technik
icon_fn=lambda x: signal_icon((0, 5, 10), x),
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=True,
@@ -484,7 +479,6 @@ SENSOR_META: dict[str, HuaweiSensorGroup] = {
"ulbandwidth": HuaweiSensorEntityDescription(
key="ulbandwidth",
translation_key="uplink_bandwidth",
# https://en.wikipedia.org/wiki/LTE_frequency_bands, arbitrary
icon_fn=lambda x: bandwidth_icon((8, 15), x),
entity_category=EntityCategory.DIAGNOSTIC,
),
@@ -779,15 +773,10 @@ async def async_setup_entry(
continue
if key_meta := SENSOR_META.get(key):
if key_meta.include:
items = {k: v for k, v in items.items() if key_meta.include.search(k)}
items = filter(key_meta.include.search, items)
if key_meta.exclude:
items = {
k: v for k, v in items.items() if not key_meta.exclude.search(k)
}
for item, value in items.items():
if value is None:
_LOGGER.debug("Ignoring sensor %s.%s due to None value", key, item)
continue
items = [x for x in items if not key_meta.exclude.search(x)]
for item in items:
if not (desc := SENSOR_META[key].descriptions.get(item)):
_LOGGER.debug( # pylint: disable=hass-logger-period # false positive
(

View File

@@ -168,7 +168,6 @@ SUPPORTED_PLATFORMS_UI: Final = {
Platform.FAN,
Platform.DATETIME,
Platform.LIGHT,
Platform.SCENE,
Platform.SENSOR,
Platform.SWITCH,
Platform.TIME,
@@ -228,9 +227,3 @@ class FanConf:
"""Common config keys for fan."""
MAX_STEP: Final = "max_step"
class SceneConf:
"""Common config keys for scene."""
SCENE_NUMBER: Final = "scene_number"

View File

@@ -110,6 +110,13 @@ def _data_secure_group_key_issue_handler(
class DataSecureGroupIssueRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow for outdated DataSecure keys."""
@callback
def _async_get_placeholders(self) -> dict[str, str]:
issue_registry = ir.async_get(self.hass)
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
assert issue is not None
return issue.translation_placeholders or {}
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
@@ -150,6 +157,7 @@ class DataSecureGroupIssueRepairFlow(RepairsFlow):
return self.async_show_form(
step_id="secure_knxkeys",
data_schema=vol.Schema(fields),
description_placeholders=self._async_get_placeholders(),
errors=errors,
)

View File

@@ -10,23 +10,13 @@ from homeassistant import config_entries
from homeassistant.components.scene import BaseScene
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, SceneConf
from .entity import (
KnxUiEntity,
KnxUiEntityPlatformController,
KnxYamlEntity,
_KnxEntityBase,
)
from .const import KNX_ADDRESS, KNX_MODULE_KEY
from .entity import KnxYamlEntity
from .knx_module import KNXModule
from .schema import SceneSchema
from .storage.const import CONF_ENTITY, CONF_GA_SCENE
from .storage.util import ConfigExtractor
async def async_setup_entry(
@@ -36,53 +26,18 @@ async def async_setup_entry(
) -> None:
"""Set up scene(s) for KNX platform."""
knx_module = hass.data[KNX_MODULE_KEY]
platform = async_get_current_platform()
knx_module.config_store.add_platform(
platform=Platform.SCENE,
controller=KnxUiEntityPlatformController(
knx_module=knx_module,
entity_platform=platform,
entity_class=KnxUiScene,
),
)
config: list[ConfigType] = knx_module.config_yaml[Platform.SCENE]
entities: list[KnxYamlEntity | KnxUiEntity] = []
if yaml_platform_config := knx_module.config_yaml.get(Platform.SCENE):
entities.extend(
KnxYamlScene(knx_module, entity_config)
for entity_config in yaml_platform_config
)
if ui_config := knx_module.config_store.data["entities"].get(Platform.SCENE):
entities.extend(
KnxUiScene(knx_module, unique_id, config)
for unique_id, config in ui_config.items()
)
if entities:
async_add_entities(entities)
async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config)
class _KnxScene(BaseScene, _KnxEntityBase):
class KNXScene(KnxYamlEntity, BaseScene):
"""Representation of a KNX scene."""
_device: XknxScene
async def _async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()
def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
self._async_record_activation()
super().after_update_callback(device)
class KnxYamlScene(_KnxScene, KnxYamlEntity):
"""Representation of a KNX scene configured from YAML."""
_device: XknxScene
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize KNX scene."""
"""Init KNX scene."""
super().__init__(
knx_module=knx_module,
device=XknxScene(
@@ -97,28 +52,11 @@ class KnxYamlScene(_KnxScene, KnxYamlEntity):
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
)
async def _async_activate(self, **kwargs: Any) -> None:
"""Activate the scene."""
await self._device.run()
class KnxUiScene(_KnxScene, KnxUiEntity):
"""Representation of a KNX scene configured from the UI."""
_device: XknxScene
def __init__(
self,
knx_module: KNXModule,
unique_id: str,
config: ConfigType,
) -> None:
"""Initialize KNX scene."""
super().__init__(
knx_module=knx_module,
unique_id=unique_id,
entity_config=config[CONF_ENTITY],
)
knx_conf = ConfigExtractor(config[DOMAIN])
self._device = XknxScene(
xknx=knx_module.xknx,
name=config[CONF_ENTITY][CONF_NAME],
group_address=knx_conf.get_write(CONF_GA_SCENE),
scene_number=knx_conf.get(SceneConf.SCENE_NUMBER),
)
def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
self._async_record_activation()
super().after_update_callback(device)

View File

@@ -61,7 +61,6 @@ from .const import (
CoverConf,
FanConf,
FanZeroMode,
SceneConf,
)
from .validation import (
backwards_compatible_xknx_climate_enum_member,
@@ -823,7 +822,7 @@ class SceneSchema(KNXPlatformSchema):
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Required(SceneConf.SCENE_NUMBER): vol.All(
vol.Required(CONF_SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,

View File

@@ -72,8 +72,5 @@ CONF_GA_WHITE_SWITCH: Final = "ga_white_switch"
CONF_GA_HUE: Final = "ga_hue"
CONF_GA_SATURATION: Final = "ga_saturation"
# Scene
CONF_GA_SCENE: Final = "ga_scene"
# Sensor
CONF_ALWAYS_CALLBACK: Final = "always_callback"

View File

@@ -40,7 +40,6 @@ from ..const import (
CoverConf,
FanConf,
FanZeroMode,
SceneConf,
)
from ..dpt import get_supported_dpts
from .const import (
@@ -83,7 +82,6 @@ from .const import (
CONF_GA_RED_BRIGHTNESS,
CONF_GA_RED_SWITCH,
CONF_GA_SATURATION,
CONF_GA_SCENE,
CONF_GA_SENSOR,
CONF_GA_SETPOINT_SHIFT,
CONF_GA_SPEED,
@@ -421,25 +419,6 @@ LIGHT_KNX_SCHEMA = AllSerializeFirst(
),
)
SCENE_KNX_SCHEMA = vol.Schema(
{
vol.Required(CONF_GA_SCENE): GASelector(
state=False,
passive=False,
write_required=True,
valid_dpt=["17.001", "18.001"],
),
vol.Required(SceneConf.SCENE_NUMBER): AllSerializeFirst(
selector.NumberSelector(
selector.NumberSelectorConfig(
min=1, max=64, step=1, mode=selector.NumberSelectorMode.BOX
)
),
vol.Coerce(int),
),
},
)
SWITCH_KNX_SCHEMA = vol.Schema(
{
vol.Required(CONF_GA_SWITCH): GASelector(write_required=True, valid_dpt="1"),
@@ -715,7 +694,6 @@ KNX_SCHEMA_FOR_PLATFORM = {
Platform.DATETIME: DATETIME_KNX_SCHEMA,
Platform.FAN: FAN_KNX_SCHEMA,
Platform.LIGHT: LIGHT_KNX_SCHEMA,
Platform.SCENE: SCENE_KNX_SCHEMA,
Platform.SENSOR: SENSOR_KNX_SCHEMA,
Platform.SWITCH: SWITCH_KNX_SCHEMA,
Platform.TIME: TIME_KNX_SCHEMA,

View File

@@ -774,19 +774,6 @@
}
}
},
"scene": {
"description": "A KNX entity can activate a KNX scene and updates when the scene number is received.",
"knx": {
"ga_scene": {
"description": "Group address to activate a scene.",
"label": "Scene"
},
"scene_number": {
"description": "The scene number this entity is associated with.",
"label": "Scene number"
}
}
},
"sensor": {
"description": "Read-only entity for numeric or string datapoints. Temperature, percent etc.",
"knx": {

View File

@@ -32,15 +32,15 @@ async def async_migrate_entry(
entity_registry, config_entry.entry_id
)
for reg_entry in registry_entries:
new_unique_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}"
new_entity_id = f"{config_entry.entry_id}_{reg_entry.unique_id[4:]}"
_LOGGER.debug(
"Migrating entity %s unique id from %s to %s",
reg_entry.entity_id,
reg_entry.unique_id,
new_unique_id,
new_entity_id,
)
entity_registry.async_update_entity(
reg_entry.entity_id, new_unique_id=new_unique_id
reg_entry.entity_id, new_unique_id=new_entity_id
)
# Migrate device identifiers

View File

@@ -46,7 +46,7 @@ class LibreHardwareMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
)
try:
computer_name = (await api.get_data()).computer_name
_ = (await api.get_data()).main_device_ids_and_names.values()
except LibreHardwareMonitorConnectionError as exception:
_LOGGER.error(exception)
errors["base"] = "cannot_connect"
@@ -54,7 +54,7 @@ class LibreHardwareMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "no_devices"
else:
return self.async_create_entry(
title=f"{computer_name} ({user_input[CONF_HOST]}:{user_input[CONF_PORT]})",
title=f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}",
data=user_input,
)

View File

@@ -65,7 +65,7 @@ class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitor
lhm_data = await self._api.get_data()
except LibreHardwareMonitorConnectionError as err:
raise UpdateFailed(
"LibreHardwareMonitor connection failed, will retry", retry_after=30
"LibreHardwareMonitor connection failed, will retry"
) from err
except LibreHardwareMonitorNoDevicesError as err:
raise UpdateFailed("No sensor data available, will retry") from err

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.6.0"]
"requirements": ["librehardwaremonitor-api==1.5.0"]
}

View File

@@ -66,7 +66,7 @@ class LibreHardwareMonitorSensor(
# Hardware device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{entry_id}_{sensor_data.device_id}")},
name=f"[{coordinator.data.computer_name}] {sensor_data.device_name}",
name=sensor_data.device_name,
model=sensor_data.device_type,
)

View File

@@ -29,13 +29,9 @@ CONF_SENSOR_ID = "sensor_id"
ACTIVE_NAME = "Energy Usage"
DAILY_NAME = "Daily Energy Usage"
ACTIVE_GENERATION_NAME = "Energy Production"
DAILY_GENERATION_NAME = "Daily Energy Production"
ACTIVE_TYPE = "active"
DAILY_TYPE = "daily"
ACTIVE_GENERATION_TYPE = "active_generation"
DAILY_GENERATION_TYPE = "daily_generation"
MIN_TIME_BETWEEN_DAILY_UPDATES = timedelta(seconds=150)
@@ -80,18 +76,6 @@ def setup_platform(
add_entities([NeurioEnergy(data, ACTIVE_NAME, ACTIVE_TYPE, update_active)])
# Daily power sensor
add_entities([NeurioEnergy(data, DAILY_NAME, DAILY_TYPE, update_daily)])
# Active generation sensor
add_entities(
[
NeurioEnergy(
data, ACTIVE_GENERATION_NAME, ACTIVE_GENERATION_TYPE, update_active
)
]
)
# Daily generation sensor
add_entities(
[NeurioEnergy(data, DAILY_GENERATION_NAME, DAILY_GENERATION_TYPE, update_daily)]
)
class NeurioData:
@@ -105,8 +89,6 @@ class NeurioData:
self._daily_usage = None
self._active_power = None
self._daily_generation = None
self._active_generation = None
self._state = None
@@ -123,29 +105,17 @@ class NeurioData:
"""Return latest active power value."""
return self._active_power
@property
def daily_generation(self):
"""Return latest daily generation value."""
return self._daily_generation
@property
def active_generation(self):
"""Return latest active generation value."""
return self._active_generation
def get_active_power(self) -> None:
"""Update current power values."""
"""Return current power value."""
try:
sample = self.neurio_client.get_samples_live_last(self.sensor_id)
self._active_power = sample["consumptionPower"]
self._active_generation = sample.get("generationPower")
except (requests.exceptions.RequestException, ValueError, KeyError):
_LOGGER.warning("Could not update current power usage")
def get_daily_usage(self) -> None:
"""Update current daily power usage and generation."""
"""Return current daily power usage."""
kwh = 0
gen_kwh = 0
start_time = dt_util.start_of_local_day().astimezone(dt_util.UTC).isoformat()
end_time = dt_util.utcnow().isoformat()
@@ -161,10 +131,8 @@ class NeurioData:
for result in history:
kwh += result["consumptionEnergy"] / 3600000
gen_kwh += result.get("generationEnergy", 0) / 3600000
self._daily_usage = round(kwh, 2)
self._daily_generation = round(gen_kwh, 2)
class NeurioEnergy(SensorEntity):
@@ -188,16 +156,6 @@ class NeurioEnergy(SensorEntity):
self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
self._attr_device_class = SensorDeviceClass.ENERGY
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
elif sensor_type == ACTIVE_GENERATION_TYPE:
self._attr_icon = "mdi:solar-power"
self._unit_of_measurement = UnitOfPower.WATT
self._attr_device_class = SensorDeviceClass.POWER
self._attr_state_class = SensorStateClass.MEASUREMENT
elif sensor_type == DAILY_GENERATION_TYPE:
self._attr_icon = "mdi:solar-power"
self._unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
self._attr_device_class = SensorDeviceClass.ENERGY
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
@property
def name(self):
@@ -222,7 +180,3 @@ class NeurioEnergy(SensorEntity):
self._state = self._data.active_power
elif self._sensor_type == DAILY_TYPE:
self._state = self._data.daily_usage
elif self._sensor_type == ACTIVE_GENERATION_TYPE:
self._state = self._data.active_generation
elif self._sensor_type == DAILY_GENERATION_TYPE:
self._state = self._data.daily_generation

View File

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["pynintendoauth", "pynintendoparental"],
"quality_scale": "bronze",
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.3.0"]
"requirements": ["pynintendoauth==1.0.2", "pynintendoparental==2.1.3"]
}

View File

@@ -115,12 +115,6 @@ async def async_setup_entry(
if entity.enabled:
await entity.query_state()
async def disconnect_callback() -> None:
for entity in entities.values():
if entity.enabled:
entity.cancel_tasks()
entity.async_write_ha_state()
async def update_callback(message: Status) -> None:
if isinstance(message, status.Raw):
return
@@ -152,7 +146,6 @@ async def async_setup_entry(
async_add_entities([zone_entity])
manager.callbacks.connect.append(connect_callback)
manager.callbacks.disconnect.append(disconnect_callback)
manager.callbacks.update.append(update_callback)
@@ -232,13 +225,13 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
await self.query_state()
async def async_will_remove_from_hass(self) -> None:
"""Entity will be removed from hass."""
self.cancel_tasks()
@property
def available(self) -> bool:
"""Return if entity is available."""
return self._manager.connected
"""Cancel the tasks when the entity is removed."""
if self._query_state_task is not None:
self._query_state_task.cancel()
self._query_state_task = None
if self._query_av_info_task is not None:
self._query_av_info_task.cancel()
self._query_av_info_task = None
async def query_state(self) -> None:
"""Query the receiver for all the info, that we care about."""
@@ -254,15 +247,6 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
await self._manager.write(query.AudioInformation())
await self._manager.write(query.VideoInformation())
def cancel_tasks(self) -> None:
"""Cancel the tasks."""
if self._query_state_task is not None:
self._query_state_task.cancel()
self._query_state_task = None
if self._query_av_info_task is not None:
self._query_av_info_task.cancel()
self._query_av_info_task = None
async def async_turn_on(self) -> None:
"""Turn the media player on."""
message = command.Power(self._zone, command.Power.Param.ON)

View File

@@ -30,9 +30,9 @@ rules:
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow:
status: exempt

View File

@@ -28,13 +28,11 @@ class Callbacks:
"""Receiver callbacks."""
connect: list[Callable[[bool], Awaitable[None]]] = field(default_factory=list)
disconnect: list[Callable[[], Awaitable[None]]] = field(default_factory=list)
update: list[Callable[[Status], Awaitable[None]]] = field(default_factory=list)
def clear(self) -> None:
"""Clear all callbacks."""
self.connect.clear()
self.disconnect.clear()
self.update.clear()
@@ -45,7 +43,6 @@ class ReceiverManager:
entry: OnkyoConfigEntry
info: ReceiverInfo
receiver: Receiver | None = None
connected: bool = False
callbacks: Callbacks
_started: asyncio.Event
@@ -86,7 +83,6 @@ class ReceiverManager:
while True:
try:
async with connect(self.info, retry=reconnect) as self.receiver:
self.connected = True
if not reconnect:
self._started.set()
else:
@@ -100,9 +96,7 @@ class ReceiverManager:
reconnect = True
finally:
self.connected = False
_LOGGER.info("Disconnected: %s", self.info)
await self.on_disconnect()
async def on_connect(self, reconnect: bool) -> None:
"""Receiver (re)connected."""
@@ -115,13 +109,8 @@ class ReceiverManager:
for callback in self.callbacks.connect:
await callback(reconnect)
async def on_disconnect(self) -> None:
"""Receiver disconnected."""
for callback in self.callbacks.disconnect:
await callback()
async def on_update(self, message: Status) -> None:
"""New message from the receiver."""
"""Process new message from the receiver."""
for callback in self.callbacks.update:
await callback(message)

View File

@@ -20,5 +20,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.18.0"]
"requirements": ["reolink-aio==0.17.1"]
}

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.20.1",
"python-roborock==3.19.0",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -92,8 +92,10 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity)
LOGGER.debug("Attempting to turn on %s via automation", self.entity_id)
await self._turn_on_action.async_run(self.hass, self._context)
elif self._mac:
LOGGER.debug(
"Attempting to turn on %s via Wake-On-Lan",
LOGGER.warning(
"Attempting to turn on %s via Wake-On-Lan; if this does not work, "
"please ensure that Wake-On-Lan is available for your device or use "
"a turn_on automation",
self.entity_id,
)
await self.hass.async_add_executor_job(self._wake_on_lan)

View File

@@ -12,7 +12,6 @@ from homeassistant.exceptions import ConfigEntryNotReady
from .coordinator import LeilSaunaCoordinator
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.LIGHT,
Platform.SENSOR,

View File

@@ -1,120 +0,0 @@
"""Binary sensor platform for Saunum Leil Sauna Control Unit integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING
from pysaunum import SaunumData
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import LeilSaunaConfigEntry
from .entity import LeilSaunaEntity
if TYPE_CHECKING:
from .coordinator import LeilSaunaCoordinator
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class LeilSaunaBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Leil Sauna binary sensor entity."""
value_fn: Callable[[SaunumData], bool | None]
BINARY_SENSORS: tuple[LeilSaunaBinarySensorEntityDescription, ...] = (
LeilSaunaBinarySensorEntityDescription(
key="door_open",
device_class=BinarySensorDeviceClass.DOOR,
value_fn=lambda data: data.door_open,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_door_open",
translation_key="alarm_door_open",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_door_open,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_door_sensor",
translation_key="alarm_door_sensor",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_door_sensor,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_thermal_cutoff",
translation_key="alarm_thermal_cutoff",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_thermal_cutoff,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_internal_temp",
translation_key="alarm_internal_temp",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_internal_temp,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_temp_sensor_short",
translation_key="alarm_temp_sensor_short",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_temp_sensor_short,
),
LeilSaunaBinarySensorEntityDescription(
key="alarm_temp_sensor_open",
translation_key="alarm_temp_sensor_open",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.alarm_temp_sensor_open,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: LeilSaunaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Saunum Leil Sauna binary sensors from a config entry."""
coordinator = entry.runtime_data
async_add_entities(
LeilSaunaBinarySensorEntity(coordinator, description)
for description in BINARY_SENSORS
if description.value_fn(coordinator.data) is not None
)
class LeilSaunaBinarySensorEntity(LeilSaunaEntity, BinarySensorEntity):
"""Representation of a Saunum Leil Sauna binary sensor."""
entity_description: LeilSaunaBinarySensorEntityDescription
def __init__(
self,
coordinator: LeilSaunaCoordinator,
description: LeilSaunaBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}-{description.key}"
self.entity_description = description
@property
def is_on(self) -> bool | None:
"""Return the state of the binary sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -100,12 +100,6 @@ class LeilSaunaClimate(LeilSaunaEntity, ClimateEntity):
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new HVAC mode."""
if hvac_mode == HVACMode.HEAT and self.coordinator.data.door_open:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="door_open",
)
try:
if hvac_mode == HVACMode.HEAT:
await self.coordinator.client.async_start_session()

View File

@@ -30,26 +30,6 @@
}
},
"entity": {
"binary_sensor": {
"alarm_door_open": {
"name": "Door open during heating alarm"
},
"alarm_door_sensor": {
"name": "Door open too long alarm"
},
"alarm_internal_temp": {
"name": "Internal temperature alarm"
},
"alarm_temp_sensor_open": {
"name": "Temperature sensor disconnected alarm"
},
"alarm_temp_sensor_short": {
"name": "Temperature sensor shorted alarm"
},
"alarm_thermal_cutoff": {
"name": "Thermal cutoff alarm"
}
},
"light": {
"light": {
"name": "[%key:component::light::title%]"
@@ -69,9 +49,6 @@
"communication_error": {
"message": "Communication error: {error}"
},
"door_open": {
"message": "Cannot start sauna session when sauna door is open"
},
"session_not_active": {
"message": "Cannot change fan mode when sauna session is not active"
},

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmarlaapi", "pysignalr"],
"quality_scale": "bronze",
"requirements": ["pysmarlaapi==0.9.3"]
"requirements": ["pysmarlaapi==0.9.2"]
}

View File

@@ -7,7 +7,6 @@ from datetime import timedelta
import logging
from urllib.parse import ParseResult, urlparse
from aiohttp import CookieJar
from solarlog_cli.solarlog_connector import SolarLogConnector
from solarlog_cli.solarlog_exceptions import (
SolarLogAuthenticationError,
@@ -21,7 +20,7 @@ from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify
@@ -64,9 +63,7 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]):
self.host,
tz=hass.config.time_zone,
password=password,
session=async_create_clientsession(
hass, cookie_jar=CookieJar(quote_cookie=False)
),
session=async_get_clientsession(hass),
)
async def _async_setup(self) -> None:

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
import datetime
from functools import partial
from http import HTTPStatus
from ipaddress import AddressValueError, IPv4Address
import logging
import socket
@@ -13,7 +12,7 @@ from typing import Any, cast
from urllib.parse import urlparse
from aiohttp import ClientError
from requests.exceptions import HTTPError, Timeout
from requests.exceptions import Timeout
from soco import events_asyncio, zonegroupstate
import soco.config as soco_config
from soco.core import SoCo
@@ -55,8 +54,6 @@ from .const import (
SUB_FAIL_ISSUE_ID,
SUB_FAIL_URL,
SUBSCRIPTION_TIMEOUT,
UPNP_DOCUMENTATION_URL,
UPNP_ISSUE_ID,
UPNP_ST,
)
from .exception import SonosUpdateError
@@ -187,32 +184,6 @@ class SonosDiscoveryManager:
"""Check if device at provided IP is known to be invisible."""
return any(x for x in self._known_invisible if x.ip_address == ip_address)
async def _process_http_connection_error(
self, err: HTTPError, ip_address: str
) -> None:
"""Process HTTP Errors when connecting to a Sonos speaker."""
response = err.response
# When UPnP is disabled, Sonos returns HTTP 403 Forbidden error.
# Create issue advising user to enable UPnP on Sonos system.
if response is not None and response.status_code == HTTPStatus.FORBIDDEN:
ir.async_create_issue(
self.hass,
DOMAIN,
f"{UPNP_ISSUE_ID}_{ip_address}",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="upnp_disabled",
translation_placeholders={
"device_ip": ip_address,
"documentation_url": UPNP_DOCUMENTATION_URL,
},
)
_LOGGER.error(
"HTTP error connecting to Sonos speaker at %s: %s",
ip_address,
err,
)
async def async_subscribe_to_zone_updates(self, ip_address: str) -> None:
"""Test subscriptions and create SonosSpeakers based on results."""
try:
@@ -224,29 +195,13 @@ class SonosDiscoveryManager:
)
return
soco = SoCo(ip_address)
try:
# Cache now to avoid household ID lookup during first ZoneGroupState processing
await self.hass.async_add_executor_job(
getattr,
soco,
"household_id",
)
sub = await soco.zoneGroupTopology.subscribe()
except HTTPError as err:
await self._process_http_connection_error(err, ip_address)
return
except (
OSError,
SoCoException,
Timeout,
TimeoutError,
) as err:
_LOGGER.error(
"Error connecting to discovered Sonos speaker at %s: %s",
ip_address,
err,
)
return
# Cache now to avoid household ID lookup during first ZoneGroupState processing
await self.hass.async_add_executor_job(
getattr,
soco,
"household_id",
)
sub = await soco.zoneGroupTopology.subscribe()
@callback
def _async_add_visible_zones(subscription_succeeded: bool = False) -> None:
@@ -435,9 +390,6 @@ class SonosDiscoveryManager:
sync_get_visible_zones,
soco,
)
except HTTPError as err:
await self._process_http_connection_error(err, ip_addr)
continue
except (
OSError,
SoCoException,

View File

@@ -20,9 +20,6 @@ PLATFORMS = [
Platform.SWITCH,
]
UPNP_ISSUE_ID = "upnp_disabled"
UPNP_DOCUMENTATION_URL = "https://www.home-assistant.io/integrations/sonos/#403-error-when-setting-up-the-integration"
SUB_FAIL_ISSUE_ID = "subscriptions_failed"
SUB_FAIL_URL = "https://www.home-assistant.io/integrations/sonos/#network-requirements"

View File

@@ -132,10 +132,6 @@
"subscriptions_failed": {
"description": "Falling back to polling, functionality may be limited.\n\nSonos device at {device_ip} cannot reach Home Assistant at {listener_address}.\n\nSee our [documentation]({sub_fail_url}) for more information on how to solve this issue.",
"title": "Networking error: subscriptions failed"
},
"upnp_disabled": {
"description": "Unable to connect to Sonos speaker at {device_ip}.\n\nPlease ensure UPnP is enabled on your Sonos system.\n\nOpen the Sonos app on your phone or tablet. Go to Account > Privacy and Security > UPnP. Enable the UPnP setting. Once UPnP is enabled, return to Home Assistant and reload the Sonos integration. The connection should now succeed. See our [documentation]({documentation_url}) for steps to resolve this issue.",
"title": "Networking error: UPnP disabled"
}
},
"services": {

View File

@@ -1,19 +1,122 @@
"""The component for STIEBEL ELTRON heat pumps with ISGWeb Modbus module."""
import logging
from typing import Any
from pymodbus.client import ModbusTcpClient
from pystiebeleltron.pystiebeleltron import StiebelEltronAPI
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PORT, Platform
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
DEVICE_DEFAULT_NAME,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.typing import ConfigType
from .const import CONF_HUB, DEFAULT_HUB, DOMAIN
MODBUS_DOMAIN = "modbus"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string,
vol.Optional(CONF_HUB, default=DEFAULT_HUB): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
_LOGGER = logging.getLogger(__name__)
_PLATFORMS: list[Platform] = [Platform.CLIMATE]
async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
"""Set up the STIEBEL ELTRON component."""
hub_config: dict[str, Any] | None = None
if MODBUS_DOMAIN in config:
for hub in config[MODBUS_DOMAIN]:
if hub[CONF_NAME] == config[DOMAIN][CONF_HUB]:
hub_config = hub
break
if hub_config is None:
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_missing_hub",
breaks_in_ha_version="2025.11.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_missing_hub",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Stiebel Eltron",
},
)
return
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_HOST: hub_config[CONF_HOST],
CONF_PORT: hub_config[CONF_PORT],
CONF_NAME: config[DOMAIN][CONF_NAME],
},
)
if (
result.get("type") is FlowResultType.ABORT
and result.get("reason") != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result['reason']}",
breaks_in_ha_version="2025.11.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Stiebel Eltron",
},
)
return
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_yaml",
breaks_in_ha_version="2025.11.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Stiebel Eltron",
},
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the STIEBEL ELTRON component."""
if DOMAIN in config:
hass.async_create_task(_async_import(hass, config))
return True
type StiebelEltronConfigEntry = ConfigEntry[StiebelEltronAPI]

View File

@@ -276,8 +276,6 @@ async def make_device_data(
"Color Bulb",
"RGBICWW Floor Lamp",
"RGBICWW Strip Light",
"Ceiling Light",
"Ceiling Light Pro",
]:
coordinator = await coordinator_for_device(
hass, entry, api, device, coordinators_by_id

View File

@@ -4,7 +4,6 @@ import asyncio
from typing import Any
from switchbot_api import (
CeilingLightCommands,
CommonCommands,
Device,
Remote,
@@ -54,16 +53,6 @@ class SwitchBotCloudLight(SwitchBotCloudEntity, LightEntity):
_attr_color_mode = ColorMode.UNKNOWN
def _get_default_color_mode(self) -> ColorMode:
"""Return the default color mode."""
if not self.supported_color_modes:
return ColorMode.UNKNOWN
if ColorMode.RGB in self.supported_color_modes:
return ColorMode.RGB
if ColorMode.COLOR_TEMP in self.supported_color_modes:
return ColorMode.COLOR_TEMP
return ColorMode.UNKNOWN
def _set_attributes(self) -> None:
"""Set attributes from coordinator data."""
if self.coordinator.data is None:
@@ -94,9 +83,8 @@ class SwitchBotCloudLight(SwitchBotCloudEntity, LightEntity):
brightness: int | None = kwargs.get("brightness")
rgb_color: tuple[int, int, int] | None = kwargs.get("rgb_color")
color_temp_kelvin: int | None = kwargs.get("color_temp_kelvin")
if brightness is not None:
self._attr_color_mode = self._get_default_color_mode()
self._attr_color_mode = ColorMode.RGB
await self._send_brightness_command(brightness)
elif rgb_color is not None:
self._attr_color_mode = ColorMode.RGB
@@ -105,7 +93,7 @@ class SwitchBotCloudLight(SwitchBotCloudEntity, LightEntity):
self._attr_color_mode = ColorMode.COLOR_TEMP
await self._send_color_temperature_command(color_temp_kelvin)
else:
self._attr_color_mode = self._get_default_color_mode()
self._attr_color_mode = ColorMode.RGB
await self.send_api_command(CommonCommands.ON)
await asyncio.sleep(AFTER_COMMAND_REFRESH)
await self.coordinator.async_request_refresh()
@@ -161,36 +149,11 @@ class SwitchBotCloudRGBWWLight(SwitchBotCloudLight):
)
class SwitchBotCloudCeilingLight(SwitchBotCloudLight):
"""Representation of SwitchBot Ceiling Light."""
_attr_max_color_temp_kelvin = 6500
_attr_min_color_temp_kelvin = 2700
_attr_supported_color_modes = {ColorMode.COLOR_TEMP}
async def _send_brightness_command(self, brightness: int) -> None:
"""Send a brightness command."""
await self.send_api_command(
CeilingLightCommands.SET_BRIGHTNESS,
parameters=str(value_map_brightness(brightness)),
)
async def _send_color_temperature_command(self, color_temp_kelvin: int) -> None:
"""Send a color temperature command."""
await self.send_api_command(
CeilingLightCommands.SET_COLOR_TEMPERATURE,
parameters=str(color_temp_kelvin),
)
@callback
def _async_make_entity(
api: SwitchBotAPI, device: Device | Remote, coordinator: SwitchBotCoordinator
) -> SwitchBotCloudStripLight | SwitchBotCloudRGBWWLight | SwitchBotCloudCeilingLight:
) -> SwitchBotCloudStripLight | SwitchBotCloudRGBWWLight:
"""Make a SwitchBotCloudLight."""
if device.device_type == "Strip Light":
return SwitchBotCloudStripLight(api, device, coordinator)
if device.device_type in ["Ceiling Light", "Ceiling Light Pro"]:
return SwitchBotCloudCeilingLight(api, device, coordinator)
return SwitchBotCloudRGBWWLight(api, device, coordinator)

View File

@@ -180,7 +180,7 @@ SENSOR_DESCRIPTIONS_BY_DEVICE_TYPES = {
HUMIDITY_DESCRIPTION,
BATTERY_DESCRIPTION,
),
"Presence Sensor": (BATTERY_DESCRIPTION, LIGHTLEVEL_DESCRIPTION),
"Presence Sensor": (BATTERY_DESCRIPTION,),
"Relay Switch 1PM": (
POWER_DESCRIPTION,
VOLTAGE_DESCRIPTION,

View File

@@ -42,6 +42,16 @@ BINARY_SENSORS: tuple[TailscaleBinarySensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
is_on_fn=lambda device: device.key_expiry_disabled,
),
TailscaleBinarySensorEntityDescription(
key="client_supports_hair_pinning",
translation_key="client_supports_hair_pinning",
entity_category=EntityCategory.DIAGNOSTIC,
is_on_fn=lambda device: (
device.client_connectivity.client_supports.hair_pinning
if device.client_connectivity is not None
else None
),
),
TailscaleBinarySensorEntityDescription(
key="client_supports_ipv6",
translation_key="client_supports_ipv6",

View File

@@ -1,6 +1,9 @@
{
"entity": {
"binary_sensor": {
"client_supports_hair_pinning": {
"default": "mdi:wan"
},
"client_supports_ipv6": {
"default": "mdi:wan"
},

View File

@@ -29,6 +29,9 @@
"client": {
"name": "Client"
},
"client_supports_hair_pinning": {
"name": "Supports hairpinning"
},
"client_supports_ipv6": {
"name": "Supports IPv6"
},

View File

@@ -44,12 +44,9 @@ from .const import (
ATTR_CAPTION,
ATTR_CHAT_ACTION,
ATTR_CHAT_ID,
ATTR_DIRECTORY_PATH,
ATTR_DISABLE_NOTIF,
ATTR_DISABLE_WEB_PREV,
ATTR_FILE,
ATTR_FILE_ID,
ATTR_FILE_NAME,
ATTR_IS_ANONYMOUS,
ATTR_IS_BIG,
ATTR_KEYBOARD,
@@ -94,7 +91,6 @@ from .const import (
PLATFORM_WEBHOOKS,
SERVICE_ANSWER_CALLBACK_QUERY,
SERVICE_DELETE_MESSAGE,
SERVICE_DOWNLOAD_FILE,
SERVICE_EDIT_CAPTION,
SERVICE_EDIT_MESSAGE,
SERVICE_EDIT_MESSAGE_MEDIA,
@@ -332,15 +328,6 @@ SERVICE_SCHEMA_SET_MESSAGE_REACTION = vol.Schema(
}
)
SERVICE_SCHEMA_DOWNLOAD_FILE = vol.Schema(
{
vol.Optional(CONF_CONFIG_ENTRY_ID): cv.string,
vol.Required(ATTR_FILE_ID): cv.string,
vol.Optional(ATTR_DIRECTORY_PATH): cv.string,
vol.Optional(ATTR_FILE_NAME): cv.string,
}
)
SERVICE_MAP: dict[str, VolSchemaType] = {
SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE,
SERVICE_SEND_CHAT_ACTION: SERVICE_SCHEMA_SEND_CHAT_ACTION,
@@ -360,7 +347,6 @@ SERVICE_MAP: dict[str, VolSchemaType] = {
SERVICE_DELETE_MESSAGE: SERVICE_SCHEMA_DELETE_MESSAGE,
SERVICE_LEAVE_CHAT: SERVICE_SCHEMA_LEAVE_CHAT,
SERVICE_SET_MESSAGE_REACTION: SERVICE_SCHEMA_SET_MESSAGE_REACTION,
SERVICE_DOWNLOAD_FILE: SERVICE_SCHEMA_DOWNLOAD_FILE,
}
@@ -456,8 +442,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await notify_service.set_message_reaction(context=service.context, **kwargs)
elif msgtype == SERVICE_EDIT_MESSAGE_MEDIA:
await notify_service.edit_message_media(context=service.context, **kwargs)
elif msgtype == SERVICE_DOWNLOAD_FILE:
return await notify_service.download_file(context=service.context, **kwargs)
else:
await notify_service.edit_message(
msgtype, context=service.context, **kwargs
@@ -503,7 +487,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
SERVICE_SEND_STICKER,
SERVICE_SEND_LOCATION,
SERVICE_SEND_POLL,
SERVICE_DOWNLOAD_FILE,
]:
supports_response = SupportsResponse.OPTIONAL

View File

@@ -5,8 +5,6 @@ import asyncio
from collections.abc import Callable, Sequence
import io
import logging
import os
from pathlib import Path
from ssl import SSLContext
from types import MappingProxyType
from typing import Any, cast
@@ -15,7 +13,6 @@ import httpx
from telegram import (
Bot,
CallbackQuery,
File,
InlineKeyboardButton,
InlineKeyboardMarkup,
InputMedia,
@@ -48,7 +45,6 @@ from homeassistant.const import (
from homeassistant.core import Context, HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.json import JsonValueType
from homeassistant.util.ssl import get_default_context, get_default_no_verify_context
from .const import (
@@ -65,7 +61,6 @@ from .const import (
ATTR_FILE_ID,
ATTR_FILE_MIME_TYPE,
ATTR_FILE_NAME,
ATTR_FILE_PATH,
ATTR_FILE_SIZE,
ATTR_FROM_FIRST,
ATTR_FROM_LAST,
@@ -1042,60 +1037,6 @@ class TelegramNotificationService:
context=context,
)
async def download_file(
self,
file_id: str,
directory_path: str | None = None,
file_name: str | None = None,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> dict[str, JsonValueType]:
"""Download a file from Telegram."""
if not directory_path:
directory_path = self.hass.config.path(DOMAIN)
file: File = await self._send_msg(
self.bot.get_file,
"Error getting file",
None,
file_id=file_id,
context=context,
)
if not file.file_path:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="action_failed",
translation_placeholders={
"error": "No file path returned from Telegram"
},
)
if not file_name:
file_name = os.path.basename(file.file_path)
custom_path = os.path.join(directory_path, file_name)
await self.hass.async_add_executor_job(
self._prepare_download_directory, directory_path
)
_LOGGER.debug("Download file %s to %s", file_id, custom_path)
try:
file_content = await file.download_as_bytearray()
await self.hass.async_add_executor_job(
Path(custom_path).write_bytes, file_content
)
except (RuntimeError, OSError, TelegramError) as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="action_failed",
translation_placeholders={"error": str(exc)},
) from exc
return {ATTR_FILE_PATH: custom_path}
@staticmethod
def _prepare_download_directory(directory_path: str) -> None:
"""Create download directory if it does not exist."""
if not os.path.exists(directory_path):
_LOGGER.debug("directory %s does not exist, creating it", directory_path)
os.makedirs(directory_path, exist_ok=True)
def initialize_bot(hass: HomeAssistant, p_config: MappingProxyType[str, Any]) -> Bot:
"""Initialize telegram bot with proxy support."""

View File

@@ -43,7 +43,6 @@ SERVICE_EDIT_REPLYMARKUP = "edit_replymarkup"
SERVICE_ANSWER_CALLBACK_QUERY = "answer_callback_query"
SERVICE_DELETE_MESSAGE = "delete_message"
SERVICE_LEAVE_CHAT = "leave_chat"
SERVICE_DOWNLOAD_FILE = "download_file"
SIGNAL_UPDATE_EVENT = "telegram_bot_update_event"
EVENT_TELEGRAM_CALLBACK = "telegram_callback"
@@ -84,11 +83,9 @@ ATTR_CHAT_INSTANCE = "chat_instance"
ATTR_DATE = "date"
ATTR_DISABLE_NOTIF = "disable_notification"
ATTR_DISABLE_WEB_PREV = "disable_web_page_preview"
ATTR_DIRECTORY_PATH = "directory_path"
ATTR_EDITED_MSG = "edited_message"
ATTR_FILE = "file"
ATTR_FILE_ID = "file_id"
ATTR_FILE_PATH = "file_path"
ATTR_FILE_MIME_TYPE = "file_mime_type"
ATTR_FILE_NAME = "file_name"
ATTR_FILE_SIZE = "file_size"

View File

@@ -13,9 +13,6 @@
"delete_message": {
"service": "mdi:delete"
},
"download_file": {
"service": "mdi:paperclip"
},
"edit_caption": {
"service": "mdi:pencil"
},

View File

@@ -911,25 +911,3 @@ set_message_reaction:
required: false
selector:
boolean:
download_file:
fields:
config_entry_id:
selector:
config_entry:
integration: telegram_bot
file_id:
required: true
example: "ABCD1234Efgh5678Ijkl90mnopQRStuvwx"
selector:
text:
directory_path:
required: false
default: "/config/telegram_bot"
selector:
text:
file_name:
required: false
example: "my_downloaded_file"
selector:
text:

View File

@@ -324,29 +324,6 @@
},
"name": "Delete message"
},
"download_file": {
"description": "Download the file to a local path.",
"fields": {
"config_entry_id": {
"description": "The config entry representing the Telegram bot to get the file.",
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]"
},
"directory_path": {
"description": "Local directory path to save the file to. Defaults to the 'telegram_bot' directory within your Home Assistant configuration directory.",
"example": "/config/telegram_bot",
"name": "Directory path"
},
"file_id": {
"description": "ID of the file to get.",
"name": "File ID"
},
"file_name": {
"description": "Name to save the file as. If not provided, the original file name will be used.",
"name": "File name"
}
},
"name": "Download file"
},
"edit_caption": {
"description": "Edits the caption of a previously sent message.",
"fields": {

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.3.2", "teslemetry-stream==0.9.0"]
"requirements": ["tesla-fleet-api==1.3.2", "teslemetry-stream==0.8.2"]
}

View File

@@ -40,12 +40,9 @@ STATE_ATTR_TORRENT_INFO = "torrent_info"
ATTR_DELETE_DATA = "delete_data"
ATTR_TORRENT = "torrent"
ATTR_TORRENTS = "torrents"
ATTR_DOWNLOAD_PATH = "download_path"
ATTR_TORRENT_FILTER = "torrent_filter"
SERVICE_ADD_TORRENT = "add_torrent"
SERVICE_GET_TORRENTS = "get_torrents"
SERVICE_REMOVE_TORRENT = "remove_torrent"
SERVICE_START_TORRENT = "start_torrent"
SERVICE_STOP_TORRENT = "stop_torrent"
@@ -57,14 +54,3 @@ EVENT_DOWNLOADED_TORRENT = "transmission_downloaded_torrent"
STATE_UP_DOWN = "up_down"
STATE_SEEDING = "seeding"
STATE_DOWNLOADING = "downloading"
FILTER_MODES: dict[str, list[str] | None] = {
"started": ["downloading"],
"completed": ["seeding"],
"paused": ["stopped"],
"active": [
"seeding",
"downloading",
],
"all": None,
}

View File

@@ -1,45 +0,0 @@
"""Helper functions for Transmission."""
from typing import Any
from transmission_rpc.torrent import Torrent
def format_torrent(torrent: Torrent) -> dict[str, Any]:
"""Format a single torrent."""
value: dict[str, Any] = {}
value["id"] = torrent.id
value["name"] = torrent.name
value["status"] = torrent.status.value
value["percent_done"] = f"{torrent.percent_done * 100:.2f}%"
value["ratio"] = f"{torrent.ratio:.2f}"
value["eta"] = str(torrent.eta) if torrent.eta else None
value["added_date"] = torrent.added_date.isoformat()
value["done_date"] = torrent.done_date.isoformat() if torrent.done_date else None
value["download_dir"] = torrent.download_dir
value["labels"] = torrent.labels
return value
def filter_torrents(
torrents: list[Torrent], statuses: list[str] | None = None
) -> list[Torrent]:
"""Filter torrents based on the statuses provided."""
return [
torrent
for torrent in torrents
if statuses is None or torrent.status in statuses
]
def format_torrents(
torrents: list[Torrent],
) -> dict[str, dict[str, Any]]:
"""Format a list of torrents."""
value = {}
for torrent in torrents:
value[torrent.name] = format_torrent(torrent)
return value

View File

@@ -42,9 +42,6 @@
"add_torrent": {
"service": "mdi:download"
},
"get_torrents": {
"service": "mdi:file-arrow-up-down-outline"
},
"remove_torrent": {
"service": "mdi:download-off"
},

View File

@@ -1,7 +1,7 @@
{
"domain": "transmission",
"name": "Transmission",
"codeowners": ["@engrbm87", "@JPHutchins", "@andrew-codechimp"],
"codeowners": ["@engrbm87", "@JPHutchins"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/transmission",
"integration_type": "service",

View File

@@ -7,6 +7,8 @@ from contextlib import suppress
from dataclasses import dataclass
from typing import Any
from transmission_rpc.torrent import Torrent
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -18,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import (
FILTER_MODES,
STATE_ATTR_TORRENT_INFO,
STATE_DOWNLOADING,
STATE_SEEDING,
@@ -27,10 +28,20 @@ from .const import (
)
from .coordinator import TransmissionConfigEntry, TransmissionDataUpdateCoordinator
from .entity import TransmissionEntity
from .helpers import filter_torrents
PARALLEL_UPDATES = 0
MODES: dict[str, list[str] | None] = {
"started_torrents": ["downloading"],
"completed_torrents": ["seeding"],
"paused_torrents": ["stopped"],
"active_torrents": [
"seeding",
"downloading",
],
"total_torrents": None,
}
@dataclass(frozen=True, kw_only=True)
class TransmissionSensorEntityDescription(SensorEntityDescription):
@@ -73,7 +84,7 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = (
translation_key="active_torrents",
val_func=lambda coordinator: coordinator.data.active_torrent_count,
extra_state_attr_func=lambda coordinator: _torrents_info_attr(
coordinator=coordinator, key="active"
coordinator=coordinator, key="active_torrents"
),
),
TransmissionSensorEntityDescription(
@@ -81,7 +92,7 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = (
translation_key="paused_torrents",
val_func=lambda coordinator: coordinator.data.paused_torrent_count,
extra_state_attr_func=lambda coordinator: _torrents_info_attr(
coordinator=coordinator, key="paused"
coordinator=coordinator, key="paused_torrents"
),
),
TransmissionSensorEntityDescription(
@@ -89,27 +100,27 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = (
translation_key="total_torrents",
val_func=lambda coordinator: coordinator.data.torrent_count,
extra_state_attr_func=lambda coordinator: _torrents_info_attr(
coordinator=coordinator, key="total"
coordinator=coordinator, key="total_torrents"
),
),
TransmissionSensorEntityDescription(
key="completed_torrents",
translation_key="completed_torrents",
val_func=lambda coordinator: len(
filter_torrents(coordinator.torrents, FILTER_MODES["completed"])
_filter_torrents(coordinator.torrents, MODES["completed_torrents"])
),
extra_state_attr_func=lambda coordinator: _torrents_info_attr(
coordinator=coordinator, key="completed"
coordinator=coordinator, key="completed_torrents"
),
),
TransmissionSensorEntityDescription(
key="started_torrents",
translation_key="started_torrents",
val_func=lambda coordinator: len(
filter_torrents(coordinator.torrents, FILTER_MODES["started"])
_filter_torrents(coordinator.torrents, MODES["started_torrents"])
),
extra_state_attr_func=lambda coordinator: _torrents_info_attr(
coordinator=coordinator, key="started"
coordinator=coordinator, key="started_torrents"
),
),
)
@@ -158,11 +169,21 @@ def get_state(upload: int, download: int) -> str:
return STATE_IDLE
def _filter_torrents(
torrents: list[Torrent], statuses: list[str] | None = None
) -> list[Torrent]:
return [
torrent
for torrent in torrents
if statuses is None or torrent.status in statuses
]
def _torrents_info_attr(
coordinator: TransmissionDataUpdateCoordinator, key: str
) -> dict[str, Any]:
infos = {}
torrents = filter_torrents(coordinator.torrents, FILTER_MODES.get(key))
torrents = _filter_torrents(coordinator.torrents, MODES[key])
torrents = SUPPORTED_ORDER_MODES[coordinator.order](torrents)
for torrent in torrents[: coordinator.limit]:
info = infos[torrent.name] = {

View File

@@ -1,16 +1,14 @@
"""Define services for the Transmission integration."""
from enum import StrEnum
from functools import partial
import logging
from typing import Any, cast
from typing import cast
from transmission_rpc import Torrent
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, selector
@@ -18,34 +16,18 @@ from .const import (
ATTR_DELETE_DATA,
ATTR_DOWNLOAD_PATH,
ATTR_TORRENT,
ATTR_TORRENT_FILTER,
ATTR_TORRENTS,
CONF_ENTRY_ID,
DEFAULT_DELETE_DATA,
DOMAIN,
FILTER_MODES,
SERVICE_ADD_TORRENT,
SERVICE_GET_TORRENTS,
SERVICE_REMOVE_TORRENT,
SERVICE_START_TORRENT,
SERVICE_STOP_TORRENT,
)
from .coordinator import TransmissionDataUpdateCoordinator
from .helpers import filter_torrents, format_torrents
_LOGGER = logging.getLogger(__name__)
class TorrentFilter(StrEnum):
"""TorrentFilter model."""
ALL = "all"
STARTED = "started"
COMPLETED = "completed"
PAUSED = "paused"
ACTIVE = "active"
SERVICE_BASE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTRY_ID): selector.ConfigEntrySelector(
@@ -63,16 +45,6 @@ SERVICE_ADD_TORRENT_SCHEMA = vol.All(
),
)
SERVICE_GET_TORRENTS_SCHEMA = vol.All(
SERVICE_BASE_SCHEMA.extend(
{
vol.Required(ATTR_TORRENT_FILTER): vol.In(
[x.lower() for x in TorrentFilter]
),
}
),
)
SERVICE_REMOVE_TORRENT_SCHEMA = vol.All(
SERVICE_BASE_SCHEMA.extend(
{
@@ -139,24 +111,6 @@ async def _async_add_torrent(service: ServiceCall) -> None:
await coordinator.async_request_refresh()
async def _async_get_torrents(service: ServiceCall) -> dict[str, Any] | None:
"""Get torrents."""
coordinator = _get_coordinator_from_service_data(service)
torrent_filter: str = service.data[ATTR_TORRENT_FILTER]
def get_filtered_torrents() -> list[Torrent]:
"""Filter torrents based on the filter provided."""
all_torrents = coordinator.api.get_torrents()
return filter_torrents(all_torrents, FILTER_MODES[torrent_filter])
torrents = await service.hass.async_add_executor_job(get_filtered_torrents)
info = format_torrents(torrents)
return {
ATTR_TORRENTS: info,
}
async def _async_start_torrent(service: ServiceCall) -> None:
"""Start torrent."""
coordinator = _get_coordinator_from_service_data(service)
@@ -195,14 +149,6 @@ def async_setup_services(hass: HomeAssistant) -> None:
schema=SERVICE_ADD_TORRENT_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_GET_TORRENTS,
_async_get_torrents,
schema=SERVICE_GET_TORRENTS_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_REMOVE_TORRENT,

View File

@@ -16,27 +16,6 @@ add_torrent:
selector:
text:
get_torrents:
fields:
entry_id:
required: true
selector:
config_entry:
integration: transmission
torrent_filter:
required: true
example: "all"
default: "all"
selector:
select:
options:
- "all"
- "active"
- "started"
- "paused"
- "completed"
translation_key: torrent_filter
remove_torrent:
fields:
entry_id:

View File

@@ -120,15 +120,6 @@
"oldest_first": "Oldest first",
"worst_ratio_first": "Worst ratio first"
}
},
"torrent_filter": {
"options": {
"active": "Active",
"all": "All",
"completed": "Completed",
"paused": "Paused",
"started": "Started"
}
}
},
"services": {
@@ -150,20 +141,6 @@
},
"name": "Add torrent"
},
"get_torrents": {
"description": "Get a list of current torrents",
"fields": {
"entry_id": {
"description": "[%key:component::transmission::services::add_torrent::fields::entry_id::description%]",
"name": "[%key:component::transmission::services::add_torrent::fields::entry_id::name%]"
},
"torrent_filter": {
"description": "What kind of torrents you want to return, such as All or Active.",
"name": "Torrent filter"
}
},
"name": "Get torrents"
},
"remove_torrent": {
"description": "Removes a torrent.",
"fields": {

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
import asyncio
from calendar import monthrange
from datetime import date, datetime, timedelta
from enum import Enum
from typing import Any, NoReturn, cast
@@ -95,12 +94,11 @@ async def async_get_media_source(hass: HomeAssistant) -> MediaSource:
@callback
def _get_month_start_end(start: datetime) -> tuple[datetime, datetime]:
"""Get the first day of the month for start and current time."""
start = dt_util.as_local(start)
end = dt_util.now()
start = start.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
end = end.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
start = start.replace(day=1, hour=0, minute=0, second=1, microsecond=0)
end = end.replace(day=1, hour=0, minute=0, second=2, microsecond=0)
return start, end
@@ -115,19 +113,20 @@ def _bad_identifier(identifier: str, err: Exception | None = None) -> NoReturn:
@callback
def _format_duration(duration: timedelta) -> str:
formatted = ""
seconds = int(duration.total_seconds())
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
parts = []
if hours > 0:
parts.append(f"{hours}h")
if minutes > 0:
parts.append(f"{minutes}m")
if seconds > 3600:
hours = seconds // 3600
formatted += f"{hours}h "
seconds -= hours * 3600
if seconds > 60:
minutes = seconds // 60
formatted += f"{minutes}m "
seconds -= minutes * 60
if seconds > 0:
parts.append(f"{seconds}s")
formatted += f"{seconds}s "
return " ".join(parts) if parts else "0s"
return formatted.strip()
@callback
@@ -594,8 +593,7 @@ class ProtectMediaSource(MediaSource):
start = max(recording_start, start)
recording_end = dt_util.now().date()
end = start.replace(day=monthrange(start.year, start.month)[1])
end = start.replace(month=start.month + 1) - timedelta(days=1)
end = min(recording_end, end)
children = [self._build_days(data, camera_id, event_type, start, is_all=True)]
@@ -662,9 +660,10 @@ class ProtectMediaSource(MediaSource):
tzinfo=dt_util.get_default_time_zone(),
)
if is_all:
# Move to first day of next month
days_in_month = monthrange(start_dt.year, start_dt.month)[1]
end_dt = start_dt + timedelta(days=days_in_month)
if start_dt.month < 12:
end_dt = start_dt.replace(month=start_dt.month + 1)
else:
end_dt = start_dt.replace(year=start_dt.year + 1, month=1)
else:
end_dt = start_dt + timedelta(hours=24)
@@ -727,7 +726,7 @@ class ProtectMediaSource(MediaSource):
]
start, end = _get_month_start_end(data.api.bootstrap.recording_start)
while end >= start:
while end > start:
children.append(self._build_month(data, camera_id, event_type, end.date()))
end = (end - timedelta(days=1)).replace(day=1)

View File

@@ -1,26 +1,41 @@
{
"entity": {
"sensor": {
"data_received": {
"default": "mdi:server-network"
},
"data_sent": {
"default": "mdi:server-network"
},
"download_speed": {
"default": "mdi:server-network"
},
"external_ip": {
"default": "mdi:ip"
"default": "mdi:server-network"
},
"packet_download_speed": {
"default": "mdi:transmission-tower"
"default": "mdi:server-network"
},
"packet_upload_speed": {
"default": "mdi:transmission-tower"
"default": "mdi:server-network"
},
"packets_received": {
"default": "mdi:database"
"default": "mdi:server-network"
},
"packets_sent": {
"default": "mdi:database"
"default": "mdi:server-network"
},
"port_mapping_number_of_entries_ipv4": {
"default": "mdi:server-network"
},
"upload_speed": {
"default": "mdi:server-network"
},
"uptime": {
"default": "mdi:server-network"
},
"wan_status": {
"default": "mdi:network"
"default": "mdi:server-network"
}
}
}

View File

@@ -23,7 +23,7 @@ rules:
unique-config-entry: done
# Silver
action-exceptions: done
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo

View File

@@ -2,15 +2,11 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from velbusaio.channels import ButtonCounter, LightSensor, SensorNumber, Temperature
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.core import HomeAssistant
@@ -21,48 +17,6 @@ from .entity import VelbusEntity
PARALLEL_UPDATES = 0
type VelbusSensorChannel = ButtonCounter | Temperature | LightSensor | SensorNumber
@dataclass(frozen=True, kw_only=True)
class VelbusSensorEntityDescription(SensorEntityDescription):
"""Describes Velbus sensor entity."""
value_fn: Callable[[VelbusSensorChannel], float | None] = lambda channel: float(
channel.get_state()
)
unit_fn: Callable[[VelbusSensorChannel], str | None] = (
lambda channel: channel.get_unit()
)
unique_id_suffix: str = ""
SENSOR_DESCRIPTIONS: dict[str, VelbusSensorEntityDescription] = {
"power": VelbusSensorEntityDescription(
key="power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
"temperature": VelbusSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
"measurement": VelbusSensorEntityDescription(
key="measurement",
state_class=SensorStateClass.MEASUREMENT,
),
"counter": VelbusSensorEntityDescription(
key="counter",
device_class=SensorDeviceClass.ENERGY,
icon="mdi:counter",
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda channel: float(channel.get_counter_state()),
unit_fn=lambda channel: channel.get_counter_unit(),
unique_id_suffix="-counter",
),
}
async def async_setup_entry(
hass: HomeAssistant,
@@ -71,51 +25,50 @@ async def async_setup_entry(
) -> None:
"""Set up Velbus switch based on config_entry."""
await entry.runtime_data.scan_task
entities: list[VelbusSensor] = []
entities = []
for channel in entry.runtime_data.controller.get_all_sensor():
# Determine which description to use for the main sensor
entities.append(VelbusSensor(channel))
if channel.is_counter_channel():
description = SENSOR_DESCRIPTIONS["power"]
elif channel.is_temperature():
description = SENSOR_DESCRIPTIONS["temperature"]
else:
description = SENSOR_DESCRIPTIONS["measurement"]
entities.append(VelbusSensor(channel, description))
# Add counter entity if applicable
if channel.is_counter_channel():
entities.append(
VelbusSensor(channel, SENSOR_DESCRIPTIONS["counter"], is_counter=True)
)
entities.append(VelbusSensor(channel, True))
async_add_entities(entities)
class VelbusSensor(VelbusEntity, SensorEntity):
"""Representation of a sensor."""
_channel: VelbusSensorChannel
entity_description: VelbusSensorEntityDescription
_channel: ButtonCounter | Temperature | LightSensor | SensorNumber
def __init__(
self,
channel: VelbusSensorChannel,
description: VelbusSensorEntityDescription,
is_counter: bool = False,
channel: ButtonCounter | Temperature | LightSensor | SensorNumber,
counter: bool = False,
) -> None:
"""Initialize a sensor Velbus entity."""
super().__init__(channel)
self.entity_description = description
self._is_counter = is_counter
self._attr_native_unit_of_measurement = description.unit_fn(channel)
self._attr_unique_id = f"{self._attr_unique_id}{description.unique_id_suffix}"
# Modify name for counter entities
if is_counter:
self._is_counter: bool = counter
if self._is_counter:
self._attr_device_class = SensorDeviceClass.ENERGY
self._attr_icon = "mdi:counter"
self._attr_name = f"{self._attr_name}-counter"
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
self._attr_unique_id = f"{self._attr_unique_id}-counter"
elif channel.is_counter_channel():
self._attr_device_class = SensorDeviceClass.POWER
self._attr_state_class = SensorStateClass.MEASUREMENT
elif channel.is_temperature():
self._attr_device_class = SensorDeviceClass.TEMPERATURE
self._attr_state_class = SensorStateClass.MEASUREMENT
else:
self._attr_state_class = SensorStateClass.MEASUREMENT
# unit
if self._is_counter:
self._attr_native_unit_of_measurement = channel.get_counter_unit()
else:
self._attr_native_unit_of_measurement = channel.get_unit()
@property
def native_value(self) -> float | int | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self._channel)
if self._is_counter:
return float(self._channel.get_counter_state())
return float(self._channel.get_state())

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from contextlib import suppress
import os
import shutil
from typing import TYPE_CHECKING
@@ -11,7 +12,7 @@ import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, selector
from homeassistant.helpers.storage import STORAGE_DIR
@@ -35,7 +36,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
async def get_config_entry(call: ServiceCall) -> VelbusConfigEntry:
"""Get the config entry for this service call."""
entry_id: str = call.data[CONF_CONFIG_ENTRY]
if CONF_CONFIG_ENTRY in call.data:
entry_id = call.data[CONF_CONFIG_ENTRY]
if not (entry := hass.config_entries.async_get_entry(entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
@@ -53,52 +55,26 @@ def async_setup_services(hass: HomeAssistant) -> None:
async def scan(call: ServiceCall) -> None:
"""Handle a scan service call."""
entry = await get_config_entry(call)
try:
await entry.runtime_data.controller.scan()
except OSError as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="scan_failed",
translation_placeholders={"error": str(exc)},
) from exc
await entry.runtime_data.controller.scan()
async def syn_clock(call: ServiceCall) -> None:
"""Handle a sync clock service call."""
entry = await get_config_entry(call)
try:
await entry.runtime_data.controller.sync_clock()
except OSError as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_clock_failed",
translation_placeholders={"error": str(exc)},
) from exc
await entry.runtime_data.controller.sync_clock()
async def set_memo_text(call: ServiceCall) -> None:
"""Handle Memo Text service call."""
entry = await get_config_entry(call)
memo_text = call.data[CONF_MEMO_TEXT]
address = call.data[CONF_ADDRESS]
module = entry.runtime_data.controller.get_module(address)
module = entry.runtime_data.controller.get_module(call.data[CONF_ADDRESS])
if not module:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="module_not_found",
translation_placeholders={"address": str(address)},
)
try:
await module.set_memo_text(memo_text)
except OSError as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_memo_text_failed",
translation_placeholders={"error": str(exc)},
) from exc
raise ServiceValidationError("Module not found")
await module.set_memo_text(memo_text)
async def clear_cache(call: ServiceCall) -> None:
"""Handle a clear cache service call."""
entry = await get_config_entry(call)
try:
with suppress(FileNotFoundError):
if call.data.get(CONF_ADDRESS):
await hass.async_add_executor_job(
os.unlink,
@@ -112,14 +88,6 @@ def async_setup_services(hass: HomeAssistant) -> None:
shutil.rmtree,
hass.config.path(STORAGE_DIR, f"velbuscache-{entry.entry_id}/"),
)
except FileNotFoundError:
pass # It's okay if the file doesn't exist
except OSError as exc:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="clear_cache_failed",
translation_placeholders={"error": str(exc)},
) from exc
# call a scan to repopulate
await scan(call)
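
Because the hunks above interleave two variants of each handler without diff markers, the wrapping pattern is easy to miss: each Velbus controller call is guarded so that an OSError is re-raised as a HomeAssistantError carrying a translation key, and each key needs a matching entry under "exceptions" in strings.json (next file). A condensed, hedged sketch of that pattern, with the controller passed in directly for illustration:

from homeassistant.exceptions import HomeAssistantError

DOMAIN = "velbus"  # assumption: mirrors the integration's const.DOMAIN


async def _scan_controller(controller) -> None:
    """Wrap a Velbus controller scan so I/O errors surface as translated errors."""
    try:
        await controller.scan()
    except OSError as exc:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="scan_failed",
            translation_placeholders={"error": str(exc)},
        ) from exc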

View File

@@ -57,29 +57,14 @@
}
},
"exceptions": {
"clear_cache_failed": {
"message": "Could not cleat the Velbus cache: {error}"
},
"integration_not_found": {
"message": "Integration \"{target}\" not found in registry."
},
"invalid_hvac_mode": {
"message": "Climate mode {hvac_mode} is not supported."
},
"module_not_found": {
"message": "Module with address {address} not found."
},
"not_loaded": {
"message": "{target} is not loaded."
},
"scan_failed": {
"message": "Scan service: {error}."
},
"set_memo_text_failed": {
"message": "Failed to set the memo text on the Velbus module: {error}."
},
"sync_clock_failed": {
"message": "Failed to sync the Velbus clock: {error}."
}
},
"issues": {

View File

@@ -12,12 +12,7 @@ from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
ServiceValidationError,
)
from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
@@ -53,15 +48,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
# (this is no change to the previous behavior, the alternative would be to reboot all)
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.state is ConfigEntryState.LOADED:
try:
await entry.runtime_data.reboot_gateway()
except (OSError, PyVLXException) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="reboot_failed",
) from err
else:
return
await entry.runtime_data.reboot_gateway()
return
raise ServiceValidationError(
translation_domain=DOMAIN,
@@ -86,18 +74,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> boo
LOGGER.debug("Retrieving nodes from %s", host)
await pyvlx.load_nodes()
except (OSError, PyVLXException) as ex:
# Since pyvlx raises the same exception for auth and connection errors,
# we need to check the exception message to distinguish them.
# Ultimately pyvlx should raise specialized exceptions for this;
# since there has not been a pyvlx release in a while, we work around it here.
if (
isinstance(ex, PyVLXException)
and ex.description == "Login to KLF 200 failed, check credentials"
):
raise ConfigEntryAuthFailed(
f"Invalid authentication for Velux gateway at {host}"
) from ex
# Defer setup and retry later as the bridge is not ready/available
raise ConfigEntryNotReady(
f"Unable to connect to Velux gateway at {host}. "

View File

@@ -1,6 +1,5 @@
"""Config flow for Velux integration."""
from collections.abc import Mapping
from typing import Any
from pyvlx import PyVLX, PyVLXException
@@ -29,15 +28,6 @@ async def _check_connection(host: str, password: str) -> dict[str, Any]:
await pyvlx.connect()
await pyvlx.disconnect()
except (PyVLXException, ConnectionError) as err:
# since pyvlx raises the same exception for auth and connection errors,
# we need to check the exception message to distinguish them
if (
isinstance(err, PyVLXException)
and err.description == "Login to KLF 200 failed, check credentials"
):
LOGGER.debug("Invalid password")
return {"base": "invalid_auth"}
LOGGER.debug("Cannot connect: %s", err)
return {"base": "cannot_connect"}
except Exception as err: # noqa: BLE001
@@ -79,42 +69,6 @@ class VeluxConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauth flow."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Handle reauth flow when password has changed."""
reauth_entry = self._get_reauth_entry()
errors: dict[str, str] = {}
if user_input is not None:
errors = await _check_connection(
reauth_entry.data[CONF_HOST], user_input[CONF_PASSWORD]
)
if not errors:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
}
),
errors=errors,
description_placeholders={
"host": reauth_entry.data[CONF_HOST],
},
)
async def async_step_dhcp(
self, discovery_info: DhcpServiceInfo
) -> ConfigFlowResult:

View File

@@ -30,7 +30,7 @@ rules:
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage:
status: todo
comment: add tests where missing

View File

@@ -1,12 +1,10 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
@@ -19,15 +17,6 @@
},
"description": "Please enter the password for {name} ({host})"
},
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "The password for your KLF200 gateway."
},
"description": "The password for {host} is incorrect. Please enter the correct password."
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",

View File

@@ -7,18 +7,15 @@ from pyvesync.utils.errors import VeSyncLoginError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN, VS_COORDINATOR, VS_MANAGER
from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_COORDINATOR, VS_MANAGER
from .coordinator import VeSyncDataCoordinator
from .services import async_setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = [
Platform.BINARY_SENSOR,
@@ -35,14 +32,6 @@ PLATFORMS = [
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up my integration."""
async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up Vesync as config entry."""
username = config_entry.data[CONF_USERNAME]
@@ -73,6 +62,22 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
async def async_new_device_discovery(service: ServiceCall) -> None:
"""Discover and add new devices."""
manager = hass.data[DOMAIN][VS_MANAGER]
known_devices = list(manager.devices)
await manager.get_devices()
new_devices = [
device for device in manager.devices if device not in known_devices
]
if new_devices:
async_dispatcher_send(hass, "vesync_new_devices", new_devices)
hass.services.async_register(
DOMAIN, SERVICE_UPDATE_DEVS, async_new_device_discovery
)
return True
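
The service registered above dispatches newly found devices over the dispatcher; a hedged sketch of the consuming side, assuming a platform subscribes to the same "vesync_new_devices" signal and adds one entity per device. The entity class here is a placeholder, not part of the integration (the removed services.py below used the VS_DISCOVERY/VS_DEVICES constants for the equivalent signal):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddEntitiesCallback


class VeSyncPlaceholderEntity(Entity):
    """Placeholder entity; real platforms use their own entity classes."""

    def __init__(self, device) -> None:
        self._attr_name = getattr(device, "device_name", "VeSync device")


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Sketch of a platform reacting to the new-devices signal."""

    @callback
    def _async_add_new_devices(new_devices: list) -> None:
        # One entity per device dispatched by async_new_device_discovery().
        async_add_entities(VeSyncPlaceholderEntity(device) for device in new_devices)

    # Unsubscribe automatically when the config entry is unloaded.
    entry.async_on_unload(
        async_dispatcher_connect(hass, "vesync_new_devices", _async_add_new_devices)
    )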

View File

@@ -1,36 +0,0 @@
"""Support for VeSync Services."""
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_DEVICES, VS_DISCOVERY, VS_MANAGER
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Handle for services."""
hass.services.async_register(
DOMAIN, SERVICE_UPDATE_DEVS, async_new_device_discovery
)
async def async_new_device_discovery(call: ServiceCall) -> None:
"""Discover and add new devices."""
entries = call.hass.config_entries.async_entries(DOMAIN)
entry = entries[0] if entries else None
if not entry:
raise ServiceValidationError("Entry not found")
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError("Entry not loaded")
manager = call.hass.data[DOMAIN][VS_MANAGER]
known_devices = list(manager.devices)
await manager.get_devices()
new_devices = [device for device in manager.devices if device not in known_devices]
if new_devices:
async_dispatcher_send(call.hass, VS_DISCOVERY.format(VS_DEVICES), new_devices)

View File

@@ -13,7 +13,11 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from .const import CONF_BACKUP_PATH, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .helpers import async_create_client, async_ensure_path_exists
from .helpers import (
async_create_client,
async_ensure_path_exists,
async_migrate_wrong_folder_path,
)
type WebDavConfigEntry = ConfigEntry[Client]
@@ -47,6 +51,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: WebDavConfigEntry) -> bo
)
path = entry.data.get(CONF_BACKUP_PATH, "/")
await async_migrate_wrong_folder_path(client, path)
# Ensure the backup directory exists
if not await async_ensure_path_exists(client, path):

View File

@@ -3,10 +3,14 @@
import logging
from aiowebdav2.client import Client, ClientOptions
from aiowebdav2.exceptions import WebDavError
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -40,3 +44,25 @@ async def async_ensure_path_exists(client: Client, path: str) -> bool:
return False
return True
async def async_migrate_wrong_folder_path(client: Client, path: str) -> None:
"""Migrate the wrong encoded folder path to the correct one."""
wrong_path = path.replace(" ", "%20")
# migrate folder when the old folder exists
if wrong_path != path and await client.check(wrong_path):
try:
await client.move(wrong_path, path)
except WebDavError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="failed_to_migrate_folder",
translation_placeholders={
"wrong_path": wrong_path,
"correct_path": path,
},
) from err
_LOGGER.debug(
"Migrated wrong encoded folder path from %s to %s", wrong_path, path
)
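
The migration helper renames a folder whose spaces were stored percent-encoded (for example "/backups/home%20assistant") back to the decoded path before the backup directory is ensured. A hedged usage sketch, assuming the module path below and an already authenticated aiowebdav2 Client; the path is illustrative, and the real setup raises a config-entry error rather than RuntimeError:

from aiowebdav2.client import Client

# Assumption: this is the module the hunk above belongs to.
from homeassistant.components.webdav.helpers import (
    async_ensure_path_exists,
    async_migrate_wrong_folder_path,
)


async def _prepare_backup_path(
    client: Client, path: str = "/backups/home assistant"
) -> None:
    """Migrate a legacy percent-encoded folder, then make sure the path exists."""
    await async_migrate_wrong_folder_path(client, path)
    if not await async_ensure_path_exists(client, path):
        raise RuntimeError(f"Cannot create backup path {path}")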

View File

@@ -33,6 +33,9 @@
"cannot_connect": {
"message": "Cannot connect to WebDAV server"
},
"failed_to_migrate_folder": {
"message": "Failed to migrate wrong encoded folder \"{wrong_path}\" to \"{correct_path}\"."
},
"invalid_username_password": {
"message": "Invalid username or password"
}

View File

@@ -2,11 +2,6 @@
from __future__ import annotations
import asyncio
import logging
from typing import TYPE_CHECKING
from homeassistant.config_entries import SOURCE_IGNORE
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
@@ -18,11 +13,8 @@ from .coordinator import (
WLEDConfigEntry,
WLEDDataUpdateCoordinator,
WLEDReleasesDataUpdateCoordinator,
normalize_mac_address,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = (
Platform.BUTTON,
Platform.LIGHT,
@@ -71,69 +63,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: WLEDConfigEntry) -> boo
coordinator.unsub()
return unload_ok
async def async_migrate_entry(
hass: HomeAssistant, config_entry: WLEDConfigEntry
) -> bool:
"""Migrate old entry."""
_LOGGER.debug(
"Migrating configuration from version %s.%s",
config_entry.version,
config_entry.minor_version,
)
if config_entry.version > 1:
# The user has downgraded from a future version
return False
if config_entry.version == 1:
if config_entry.minor_version < 2:
# 1.2: Normalize unique ID to be lowercase MAC address without separators.
# This matches the format used by WLED firmware.
if TYPE_CHECKING:
assert config_entry.unique_id
normalized_mac_address = normalize_mac_address(config_entry.unique_id)
duplicate_entries = [
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.unique_id
and normalize_mac_address(entry.unique_id) == normalized_mac_address
]
ignored_entries = [
entry
for entry in duplicate_entries
if entry.entry_id != config_entry.entry_id
and entry.source == SOURCE_IGNORE
]
if ignored_entries:
_LOGGER.info(
"Found %d ignored WLED config entries with the same MAC address, removing them",
len(ignored_entries),
)
await asyncio.gather(
*[
hass.config_entries.async_remove(entry.entry_id)
for entry in ignored_entries
]
)
if len(duplicate_entries) - len(ignored_entries) > 1:
_LOGGER.warning(
"Found multiple WLED config entries with the same MAC address, cannot migrate to version 1.2"
)
return False
hass.config_entries.async_update_entry(
config_entry,
unique_id=normalized_mac_address,
version=1,
minor_version=2,
)
_LOGGER.debug(
"Migration to configuration version %s.%s successful",
config_entry.version,
config_entry.minor_version,
)
return True
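
The migration keys everything on normalize_mac_address, which is imported from coordinator.py but not shown in this compare. Per the comment in the hunk, the target format is a lowercase MAC address without separators (the format reported by WLED firmware), so a plausible, hedged stand-in looks like this:

def normalize_mac_address(mac: str) -> str:
    """Return a MAC address as lowercase hex digits without separators."""
    return mac.replace(":", "").replace("-", "").lower()


# Example: normalize_mac_address("AA:BB:CC:DD:EE:FF") == "aabbccddeeff"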

Some files were not shown because too many files have changed in this diff