Mirror of https://github.com/home-assistant/core.git (synced 2025-11-19 16:00:11 +00:00)
Compare commits
96 Commits
mqtt-suben...dev_target
| Author | SHA1 | Date |
|---|---|---|
| | 8dae522043 | |
| | 6dc62073e5 | |
| | f9fd6826ce | |
| | f71653f4d7 | |
| | 3f4334f4a1 | |
| | 6ee8724a8a | |
| | 2e2c8d1b12 | |
| | 06decf9760 | |
| | 8337f1575a | |
| | 4b69543515 | |
| | 456d55a0e0 | |
| | 97ef4a35b9 | |
| | f782c78650 | |
| | 139ed34c74 | |
| | 7f14d013ac | |
| | 963e27dda4 | |
| | b8e3d57fea | |
| | 3ce6442472 | |
| | bf46bfb554 | |
| | 0de2a16d0f | |
| | c8c2413a09 | |
| | 291331f878 | |
| | a13cdbdf3d | |
| | 1bf713f279 | |
| | 10c8ee417b | |
| | b23134f4f1 | |
| | 6041894b41 | |
| | f45a6f806b | |
| | d3857a00d5 | |
| | bd07f74cf8 | |
| | 8c9b90a9f9 | |
| | 4eedc88935 | |
| | 343ea1b82d | |
| | 36e13653d2 | |
| | 80444b2165 | |
| | 262f06dd2b | |
| | bd87119c2e | |
| | 0dfa037aa8 | |
| | c32a471573 | |
| | 128ff4004c | |
| | 97b7e51171 | |
| | 433712b407 | |
| | 5d87e0f429 | |
| | acb087f1e5 | |
| | 10c12623bf | |
| | 2fe20553b3 | |
| | b431bb197a | |
| | eb9d625926 | |
| | 3a69534b09 | |
| | 8f2cedcb73 | |
| | 50afba3958 | |
| | 3658953ff3 | |
| | 14088a67f2 | |
| | 212c8f2688 | |
| | e29b9026ab | |
| | 0be5893e37 | |
| | c87e38c4cf | |
| | 4874610ad6 | |
| | 9180282fc6 | |
| | 118f30f32e | |
| | bd10da126f | |
| | b73a7928ca | |
| | 3e20c2ea93 | |
| | 60130d3d68 | |
| | c45ede2e5d | |
| | e167061f53 | |
| | 5560fb6c9e | |
| | 9808b6c961 | |
| | e8cfde579e | |
| | f695fb4d51 | |
| | 0c8dda1956 | |
| | edf82db057 | |
| | 37644511f6 | |
| | 3685d0f7c2 | |
| | 3dabfeb329 | |
| | 8e7d2d7108 | |
| | 2fe4a1164b | |
| | 05175294f6 | |
| | e2ddfb8782 | |
| | f1cc133ff6 | |
| | 0cf97cf577 | |
| | 38cea2e5f0 | |
| | 71876d5b34 | |
| | 0f780254e1 | |
| | 9e40972b11 | |
| | 07ef61dd8d | |
| | 1bf6771a54 | |
| | e7a7cb829e | |
| | 6f6b2f1ad3 | |
| | 1cc4890f75 | |
| | d3dd9b26c9 | |
| | a64d61df05 | |
| | e7c6c5311d | |
| | 72a524c868 | |
| | b437113f31 | |
| | e0e263d3b5 | |
14
.github/workflows/builder.yml
vendored

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0

@@ -94,7 +94,7 @@ jobs:
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -227,7 +227,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set build additional args
run: |

@@ -265,7 +265,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -309,7 +309,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

@@ -418,7 +418,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

@@ -463,7 +463,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0

2
.github/workflows/ci.yaml
vendored

@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

2
.github/workflows/codeql.yml
vendored

@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3

2
.github/workflows/translations.yml
vendored

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

2
.github/workflows/wheels.yml
vendored

@@ -33,7 +33,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

@@ -87,7 +87,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker

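The updated hassfest hook pattern above now also matches per-integration `conditions.yaml` and `triggers.yaml` files, with the previously separate `services.yaml` alternative folded into the same YAML group. A quick standalone check of that difference, using the two `files:` patterns copied verbatim; the sample paths are invented for illustration:

```python
import re

OLD = r"^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$"
NEW = r"^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(conditions|quality_scale|services|triggers)\.yaml|homeassistant/brands/.*\.json|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$"

# Hypothetical sample paths, chosen to show what the new pattern adds.
samples = [
    "homeassistant/components/cover/triggers.yaml",  # matched only by NEW
    "homeassistant/components/cover/services.yaml",   # matched by both
    "homeassistant/components/cover/strings.json",    # matched by both
]

for path in samples:
    print(path, bool(re.match(OLD, path)), bool(re.match(NEW, path)))
```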
@@ -579,6 +579,7 @@ homeassistant.components.wiz.*
homeassistant.components.wled.*
homeassistant.components.workday.*
homeassistant.components.worldclock.*
homeassistant.components.xbox.*
homeassistant.components.xiaomi_ble.*
homeassistant.components.yale_smart_alarm.*
homeassistant.components.yalexs_ble.*

2
CODEOWNERS
generated

@@ -1736,6 +1736,8 @@ build.json @home-assistant/supervisor
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW

5
homeassistant/brands/victron.json
Normal file

@@ -0,0 +1,5 @@
{
  "domain": "victron",
  "name": "Victron",
  "integrations": ["victron_ble", "victron_remote_monitoring"]
}

@@ -45,7 +45,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
)

PLATFORMS = [Platform.SENSOR, Platform.SWITCH]
PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]

71
homeassistant/components/adguard/update.py
Normal file

@@ -0,0 +1,71 @@
"""AdGuard Home Update platform."""

from __future__ import annotations

from datetime import timedelta
from typing import Any

from adguardhome import AdGuardHomeError

from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import AdGuardConfigEntry, AdGuardData
from .const import DOMAIN
from .entity import AdGuardHomeEntity

SCAN_INTERVAL = timedelta(seconds=300)
PARALLEL_UPDATES = 1


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AdGuardConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up AdGuard Home update entity based on a config entry."""
    data = entry.runtime_data

    if (await data.client.update.update_available()).disabled:
        return

    async_add_entities([AdGuardHomeUpdate(data, entry)], True)


class AdGuardHomeUpdate(AdGuardHomeEntity, UpdateEntity):
    """Defines an AdGuard Home update."""

    _attr_supported_features = UpdateEntityFeature.INSTALL
    _attr_name = None

    def __init__(
        self,
        data: AdGuardData,
        entry: AdGuardConfigEntry,
    ) -> None:
        """Initialize AdGuard Home update."""
        super().__init__(data, entry)

        self._attr_unique_id = "_".join(
            [DOMAIN, self.adguard.host, str(self.adguard.port), "update"]
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        value = await self.adguard.update.update_available()
        self._attr_installed_version = self.data.version
        self._attr_latest_version = value.new_version
        self._attr_release_summary = value.announcement
        self._attr_release_url = value.announcement_url

    async def async_install(
        self, version: str | None, backup: bool, **kwargs: Any
    ) -> None:
        """Install latest update."""
        try:
            await self.adguard.update.begin_update()
        except AdGuardHomeError as err:
            raise HomeAssistantError(f"Failed to install update: {err}") from err
        self.hass.config_entries.async_schedule_reload(self._entry.entry_id)
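The new entity polls AdGuard Home every `SCAN_INTERVAL` and copies the result of `update_available()` onto the standard update-entity attributes. A minimal, dependency-free sketch of that mapping, using an invented stand-in object and made-up version strings rather than the real `adguardhome` client:

```python
from dataclasses import dataclass


@dataclass
class UpdateInfo:
    """Stand-in for the object returned by adguard.update.update_available()."""

    new_version: str
    announcement: str
    announcement_url: str


def to_update_attributes(installed: str, info: UpdateInfo) -> dict[str, str]:
    # Same mapping the entity performs in _adguard_update().
    return {
        "installed_version": installed,
        "latest_version": info.new_version,
        "release_summary": info.announcement,
        "release_url": info.announcement_url,
    }


# Hypothetical sample values for illustration only.
print(to_update_attributes("v0.107.50", UpdateInfo("v0.107.51", "Bug fixes", "https://example.com")))
```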
@@ -36,5 +36,13 @@
"alarm_trigger": {
"service": "mdi:bell-ring"
}
},
"triggers": {
"disarmed": {
"trigger": "mdi:shield-off"
},
"triggered": {
"trigger": "mdi:bell-ring"
}
}
}

@@ -71,6 +71,15 @@
"message": "Arming requires a code but none was given for {entity_id}."
}
},
"selector": {
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
"alarm_arm_away": {
"description": "Arms the alarm in the away mode.",

@@ -143,5 +152,29 @@
"name": "Trigger"
}
},
"title": "Alarm control panel"
"title": "Alarm control panel",
"triggers": {
"disarmed": {
"description": "Triggers when an alarm is disarmed.",
"description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
"fields": {
"behavior": {
"description": "The behavior of the targeted alarms to trigger on.",
"name": "Behavior"
}
},
"name": "When an alarm is disarmed"
},
"triggered": {
"description": "Triggers when an alarm is triggered.",
"description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::triggers::disarmed::fields::behavior::description%]",
"name": "[%key:component::alarm_control_panel::triggers::disarmed::fields::behavior::name%]"
}
},
"name": "When an alarm is triggered"
}
}
}

16
homeassistant/components/alarm_control_panel/trigger.py
Normal file

@@ -0,0 +1,16 @@
"""Provides triggers for alarm control panels."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from .const import DOMAIN, AlarmControlPanelState

TRIGGERS: dict[str, type[Trigger]] = {
    "disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
    "triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for alarm control panels."""
    return TRIGGERS
19
homeassistant/components/alarm_control_panel/triggers.yaml
Normal file

@@ -0,0 +1,19 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: alarm_control_panel
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

disarmed: *trigger_common

triggered: *trigger_common
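The per-domain trigger modules added in this changeset all share the same shape: a module-level `TRIGGERS` mapping from a trigger key to a class produced by a factory such as `make_entity_state_trigger`, returned as-is by `async_get_triggers`. A simplified, self-contained sketch of that registry pattern with stand-in classes (not the actual Home Assistant helpers):

```python
from dataclasses import dataclass


@dataclass
class Trigger:
    """Stand-in for homeassistant.helpers.trigger.Trigger."""

    domain: str
    to_state: str


def make_entity_state_trigger(domain: str, to_state: str):
    """Factory mirroring make_entity_state_trigger: bind a domain and a target state."""

    def factory() -> Trigger:
        return Trigger(domain, to_state)

    return factory


# Per-domain registry, keyed by trigger name, as in trigger.py above.
TRIGGERS = {
    "disarmed": make_entity_state_trigger("alarm_control_panel", "disarmed"),
    "triggered": make_entity_state_trigger("alarm_control_panel", "triggered"),
}

# A full trigger key such as "alarm_control_panel.disarmed" resolves to one entry.
print(TRIGGERS["disarmed"]())
```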
@@ -6,9 +6,8 @@ import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import Event, HassJob, HomeAssistant, callback
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

@@ -20,7 +19,7 @@ from .analytics import (
EntityAnalyticsModifications,
async_devices_payload,
)
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
from .http import AnalyticsDevicesView

__all__ = [

@@ -43,28 +42,9 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
# Load stored data
await analytics.load()

@callback
def start_schedule(_event: Event) -> None:
async def start_schedule(_event: Event) -> None:
"""Start the send schedule after the started event."""
# Wait 15 min after started
async_call_later(
hass,
900,
HassJob(
analytics.send_analytics,
name="analytics schedule",
cancel_on_shutdown=True,
),
)

# Send every day
async_track_time_interval(
hass,
analytics.send_analytics,
INTERVAL,
name="analytics daily",
cancel_on_shutdown=True,
)
await analytics.async_schedule()

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)

@@ -111,7 +91,7 @@ async def websocket_analytics_preferences(
analytics = hass.data[DATA_COMPONENT]

await analytics.save_preferences(preferences)
await analytics.send_analytics()
await analytics.async_schedule()

connection.send_result(
msg["id"],

@@ -7,6 +7,8 @@ from asyncio import timeout
from collections.abc import Awaitable, Callable, Iterable, Mapping
from dataclasses import asdict as dataclass_asdict, dataclass, field
from datetime import datetime
import random
import time
from typing import Any, Protocol
import uuid

@@ -31,10 +33,18 @@ from homeassistant.const import (
BASE_PLATFORMS,
__version__ as HA_VERSION,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HomeAssistant,
ReleaseChannel,
callback,
get_release_channel,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store

@@ -51,6 +61,7 @@ from homeassistant.setup import async_get_loaded_integrations
from .const import (
ANALYTICS_ENDPOINT_URL,
ANALYTICS_ENDPOINT_URL_DEV,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
ATTR_ADDON_COUNT,
ATTR_ADDONS,
ATTR_ARCH,

@@ -71,6 +82,7 @@ from .const import (
ATTR_PROTECTED,
ATTR_RECORDER,
ATTR_SLUG,
ATTR_SNAPSHOTS,
ATTR_STATE_COUNT,
ATTR_STATISTICS,
ATTR_SUPERVISOR,

@@ -80,8 +92,10 @@ from .const import (
ATTR_UUID,
ATTR_VERSION,
DOMAIN,
INTERVAL,
LOGGER,
PREFERENCE_SCHEMA,
SNAPSHOT_VERSION,
STORAGE_KEY,
STORAGE_VERSION,
)

@@ -194,13 +208,18 @@ def gen_uuid() -> str:
return uuid.uuid4().hex


RELEASE_CHANNEL = get_release_channel()


@dataclass
class AnalyticsData:
"""Analytics data."""

onboarded: bool
preferences: dict[str, bool]
uuid: str | None
uuid: str | None = None
submission_identifier: str | None = None
snapshot_submission_time: float | None = None

@classmethod
def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:

@@ -209,6 +228,8 @@ class AnalyticsData:
data["onboarded"],
data["preferences"],
data["uuid"],
data.get("submission_identifier"),
data.get("snapshot_submission_time"),
)


@@ -219,8 +240,10 @@ class Analytics:
"""Initialize the Analytics class."""
self.hass: HomeAssistant = hass
self.session = async_get_clientsession(hass)
self._data = AnalyticsData(False, {}, None)
self._data = AnalyticsData(False, {})
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
self._basic_scheduled: CALLBACK_TYPE | None = None
self._snapshot_scheduled: CALLBACK_TYPE | None = None

@property
def preferences(self) -> dict:

@@ -228,6 +251,7 @@ class Analytics:
preferences = self._data.preferences
return {
ATTR_BASE: preferences.get(ATTR_BASE, False),
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),

@@ -244,9 +268,9 @@ class Analytics:
return self._data.uuid

@property
def endpoint(self) -> str:
def endpoint_basic(self) -> str:
"""Return the endpoint that will receive the payload."""
if HA_VERSION.endswith("0.dev0"):
if RELEASE_CHANNEL is ReleaseChannel.DEV:
# dev installations will contact the dev analytics environment
return ANALYTICS_ENDPOINT_URL_DEV
return ANALYTICS_ENDPOINT_URL

@@ -277,13 +301,17 @@ class Analytics:
):
self._data.preferences[ATTR_DIAGNOSTICS] = False

async def _save(self) -> None:
"""Save data."""
await self._store.async_save(dataclass_asdict(self._data))

async def save_preferences(self, preferences: dict) -> None:
"""Save preferences."""
preferences = PREFERENCE_SCHEMA(preferences)
self._data.preferences.update(preferences)
self._data.onboarded = True

await self._store.async_save(dataclass_asdict(self._data))
await self._save()

if self.supervisor:
await hassio.async_update_diagnostics(

@@ -292,17 +320,16 @@ class Analytics:

async def send_analytics(self, _: datetime | None = None) -> None:
"""Send analytics."""
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
return

hass = self.hass
supervisor_info = None
operating_system_info: dict[str, Any] = {}

if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Nothing to submit")
return

if self._data.uuid is None:
self._data.uuid = gen_uuid()
await self._store.async_save(dataclass_asdict(self._data))
await self._save()

if self.supervisor:
supervisor_info = hassio.get_supervisor_info(hass)

@@ -436,7 +463,7 @@ class Analytics:

try:
async with timeout(30):
response = await self.session.post(self.endpoint, json=payload)
response = await self.session.post(self.endpoint_basic, json=payload)
if response.status == 200:
LOGGER.info(
(

@@ -449,7 +476,7 @@ class Analytics:
LOGGER.warning(
"Sending analytics failed with statuscode %s from %s",
response.status,
self.endpoint,
self.endpoint_basic,
)
except TimeoutError:
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)

@@ -489,6 +516,182 @@ class Analytics:
if entry.source != SOURCE_IGNORE and entry.disabled_by is None
)

async def send_snapshot(self, _: datetime | None = None) -> None:
"""Send a snapshot."""
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
return

payload = await _async_snapshot_payload(self.hass)

headers = {
"Content-Type": "application/json",
"User-Agent": f"home-assistant/{HA_VERSION}",
}
if self._data.submission_identifier is not None:
headers["X-Device-Database-Submission-Identifier"] = (
self._data.submission_identifier
)

try:
async with timeout(30):
response = await self.session.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
)

if response.status == 200: # OK
response_data = await response.json()
new_identifier = response_data.get("submission_identifier")

if (
new_identifier is not None
and new_identifier != self._data.submission_identifier
):
self._data.submission_identifier = new_identifier
await self._save()

LOGGER.info(
"Submitted snapshot analytics to Home Assistant servers"
)

elif response.status == 400: # Bad Request
response_data = await response.json()
error_kind = response_data.get("kind", "unknown")
error_message = response_data.get("message", "Unknown error")

if error_kind == "invalid-submission-identifier":
# Clear the invalid identifier and retry on next cycle
LOGGER.warning(
"Invalid submission identifier to %s, clearing: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)
self._data.submission_identifier = None
await self._save()
else:
LOGGER.warning(
"Malformed snapshot analytics submission (%s) to %s: %s",
error_kind,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)

elif response.status == 503: # Service Unavailable
response_text = await response.text()
LOGGER.warning(
"Snapshot analytics service %s unavailable: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
response_text,
)

else:
LOGGER.warning(
"Unexpected status code %s when submitting snapshot analytics to %s",
response.status,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)

except TimeoutError:
LOGGER.error(
"Timeout sending snapshot analytics to %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)
except aiohttp.ClientError as err:
LOGGER.error(
"Error sending snapshot analytics to %s: %r",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
err,
)

async def async_schedule(self) -> None:
"""Schedule analytics."""
if not self.onboarded:
LOGGER.debug("Analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
return

if not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Basic analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
elif self._basic_scheduled is None:
# Wait 15 min after started for basic analytics
self._basic_scheduled = async_call_later(
self.hass,
900,
HassJob(
self._async_schedule_basic,
name="basic analytics schedule",
cancel_on_shutdown=True,
),
)

if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
ReleaseChannel.DEV,
ReleaseChannel.NIGHTLY,
):
LOGGER.debug("Snapshot analytics not scheduled")
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
elif self._snapshot_scheduled is None:
snapshot_submission_time = self._data.snapshot_submission_time

if snapshot_submission_time is None:
# Randomize the submission time within the 24 hours
snapshot_submission_time = random.uniform(0, 86400)
self._data.snapshot_submission_time = snapshot_submission_time
await self._save()
LOGGER.debug(
"Initialized snapshot submission time to %s",
snapshot_submission_time,
)

# Calculate delay until next submission
current_time = time.time()
delay = (snapshot_submission_time - current_time) % 86400

self._snapshot_scheduled = async_call_later(
self.hass,
delay,
HassJob(
self._async_schedule_snapshots,
name="snapshot analytics schedule",
cancel_on_shutdown=True,
),
)

async def _async_schedule_basic(self, _: datetime | None = None) -> None:
"""Schedule basic analytics."""
await self.send_analytics()

# Send basic analytics every day
self._basic_scheduled = async_track_time_interval(
self.hass,
self.send_analytics,
INTERVAL,
name="basic analytics daily",
cancel_on_shutdown=True,
)

async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
"""Schedule snapshot analytics."""
await self.send_snapshot()

# Send snapshot analytics every day
self._snapshot_scheduled = async_track_time_interval(
self.hass,
self.send_snapshot,
INTERVAL,
name="snapshot analytics daily",
cancel_on_shutdown=True,
)


def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
"""Extract domains from the YAML configuration."""

@@ -505,8 +708,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()


async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices."""
async def _async_snapshot_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices for a snapshot."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)

@@ -711,8 +914,13 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901

entities_info.append(entity_info)

return integrations_info


async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices for a direct download."""
return {
"version": "home-assistant:1",
"version": f"home-assistant:{SNAPSHOT_VERSION}",
"home_assistant": HA_VERSION,
"integrations": integrations_info,
"integrations": await _async_snapshot_payload(hass),
}

@@ -7,6 +7,8 @@ import voluptuous as vol

ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
SNAPSHOT_VERSION = "1"
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"

@@ -38,6 +40,7 @@ ATTR_PREFERENCES = "preferences"
ATTR_PROTECTED = "protected"
ATTR_RECORDER = "recorder"
ATTR_SLUG = "slug"
ATTR_SNAPSHOTS = "snapshots"
ATTR_STATE_COUNT = "state_count"
ATTR_STATISTICS = "statistics"
ATTR_SUPERVISOR = "supervisor"

@@ -51,6 +54,7 @@ ATTR_VERSION = "version"
PREFERENCE_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_BASE): bool,
vol.Optional(ATTR_SNAPSHOTS): bool,
vol.Optional(ATTR_DIAGNOSTICS): bool,
vol.Optional(ATTR_STATISTICS): bool,
vol.Optional(ATTR_USAGE): bool,

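The snapshot scheduler above picks a random submission offset once, persists it in `AnalyticsData.snapshot_submission_time`, and then derives the delay to the next run with modular arithmetic. A tiny standalone illustration of that calculation, with a randomly chosen offset just as in the integration:

```python
import random
import time

# One-time, persisted choice of a submission offset (seconds), as in async_schedule().
snapshot_submission_time = random.uniform(0, 86400)

# Delay until the next occurrence of that offset; the modulo keeps it within one day.
delay = (snapshot_submission_time - time.time()) % 86400

assert 0 <= delay < 86400
print(f"next snapshot submission in {delay / 3600:.1f} h")
```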
@@ -392,7 +392,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
input="",
input={},
)
current_tool_args = ""
if response.content_block.name == output_tool:

@@ -459,7 +459,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
type="server_tool_use",
id=response.content_block.id,
name=response.content_block.name,
input="",
input={},
)
current_tool_args = ""
elif isinstance(response.content_block, WebSearchToolResultBlock):

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.69.0"]
"requirements": ["anthropic==0.73.0"]
}

@@ -7,3 +7,26 @@ CONNECTION_TIMEOUT: int = 10

# Field name of last self test retrieved from apcupsd.
LAST_S_TEST: Final = "laststest"

# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
# repair issue translation keys.
DEPRECATED_SENSORS: Final = {
"apc": "apc_deprecated",
"end apc": "date_deprecated",
"date": "date_deprecated",
"apcmodel": "available_via_device_info",
"model": "available_via_device_info",
"firmware": "available_via_device_info",
"version": "available_via_device_info",
"upsname": "available_via_device_info",
"serialno": "available_via_device_info",
}

AVAILABLE_VIA_DEVICE_ATTR: Final = {
"apcmodel": "model",
"model": "model",
"firmware": "hw_version",
"version": "sw_version",
"upsname": "name",
"serialno": "serial_number",
}

@@ -4,6 +4,8 @@ from __future__ import annotations

import logging

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,

@@ -22,9 +24,11 @@ from homeassistant.const import (
UnitOfTime,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.issue_registry as ir

from .const import LAST_S_TEST
from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
from .entity import APCUPSdEntity

@@ -528,3 +532,62 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
if not self.native_unit_of_measurement:
self._attr_native_unit_of_measurement = inferred_unit

async def async_added_to_hass(self) -> None:
"""Handle when entity is added to Home Assistant.

If this is a deprecated sensor entity, create a repair issue to guide
the user to disable it.
"""
await super().async_added_to_hass()

if not self.enabled:
return

reason = DEPRECATED_SENSORS.get(self.entity_description.key)
if not reason:
return

automations = automations_with_entity(self.hass, self.entity_id)
scripts = scripts_with_entity(self.hass, self.entity_id)
if not automations and not scripts:
return

entity_registry = er.async_get(self.hass)
items = [
f"- [{entry.name or entry.original_name or entity_id}]"
f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
for integration, entities in (
("automation", automations),
("script", scripts),
)
for entity_id in entities
if (entry := entity_registry.async_get(entity_id))
]
placeholders = {
"entity_name": str(self.name or self.entity_id),
"entity_id": self.entity_id,
"items": "\n".join(items),
}
if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
placeholders["available_via_device_attr"] = via_attr
if device_entry := self.device_entry:
placeholders["device_id"] = device_entry.id

ir.async_create_issue(
self.hass,
DOMAIN,
f"{reason}_{self.entity_id}",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key=reason,
translation_placeholders=placeholders,
)

async def async_will_remove_from_hass(self) -> None:
"""Handle when entity will be removed from Home Assistant."""
await super().async_will_remove_from_hass()

if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")

@@ -241,5 +241,19 @@
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
},
"issues": {
"apc_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"available_via_device_info": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"date_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
}
}
}

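The repair-issue descriptions above are filled from placeholders assembled in `async_added_to_hass`: one Markdown bullet per automation or script that still references the deprecated sensor. A standalone sketch of that list-building step, with invented names and IDs standing in for real entity-registry entries:

```python
# Invented sample data; the real code pulls these from the entity registry and
# from automations_with_entity() / scripts_with_entity().
referencing = {
    "automation": [("automation.ups_alert", "UPS alert", "abc123")],
    "script": [("script.shutdown_nas", "Shut down NAS", "def456")],
}

items = [
    f"- [{name}](/config/{integration}/edit/{unique_id})"
    for integration, entries in referencing.items()
    for _entity_id, name, unique_id in entries
]

placeholders = {
    "entity_name": "UPS Model",
    "entity_id": "sensor.myups_model",
    "items": "\n".join(items),
}
print(placeholders["items"])
```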
@@ -14,5 +14,19 @@
"start_conversation": {
"service": "mdi:forum"
}
},
"triggers": {
"idle": {
"trigger": "mdi:chat-sleep"
},
"listening": {
"trigger": "mdi:chat-question"
},
"processing": {
"trigger": "mdi:chat-processing"
},
"responding": {
"trigger": "mdi:chat-alert"
}
}
}

@@ -16,6 +16,13 @@
"id": "Answer ID",
"sentences": "Sentences"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {

@@ -98,5 +105,51 @@
"name": "Start conversation"
}
},
"title": "Assist satellite"
"title": "Assist satellite",
"triggers": {
"idle": {
"description": "Triggers when an assist satellite becomes idle.",
"description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
"fields": {
"behavior": {
"description": "The behavior of the targeted assist satellites to trigger on.",
"name": "Behavior"
}
},
"name": "When an assist satellite becomes idle"
},
"listening": {
"description": "Triggers when an assist satellite starts listening.",
"description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::triggers::idle::fields::behavior::description%]",
"name": "[%key:component::assist_satellite::triggers::idle::fields::behavior::name%]"
}
},
"name": "When an assist satellite starts listening"
},
"processing": {
"description": "Triggers when an assist satellite is processing.",
"description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::triggers::idle::fields::behavior::description%]",
"name": "[%key:component::assist_satellite::triggers::idle::fields::behavior::name%]"
}
},
"name": "When an assist satellite is processing"
},
"responding": {
"description": "Triggers when an assist satellite is responding.",
"description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::triggers::idle::fields::behavior::description%]",
"name": "[%key:component::assist_satellite::triggers::idle::fields::behavior::name%]"
}
},
"name": "When an assist satellite is responding"
}
}
}

19
homeassistant/components/assist_satellite/trigger.py
Normal file

@@ -0,0 +1,19 @@
"""Provides triggers for assist satellites."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from .const import DOMAIN
from .entity import AssistSatelliteState

TRIGGERS: dict[str, type[Trigger]] = {
    "idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
    "listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
    "processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
    "responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for assist satellites."""
    return TRIGGERS
20
homeassistant/components/assist_satellite/triggers.yaml
Normal file

@@ -0,0 +1,20 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: assist_satellite
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

idle: *trigger_common
listening: *trigger_common
processing: *trigger_common
responding: *trigger_common
@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==2.45.0",
"dbus-fast==3.0.0",
"habluetooth==5.7.0"
]
}

@@ -96,5 +96,13 @@
"turn_on": {
"service": "mdi:power-on"
}
},
"triggers": {
"started_heating": {
"trigger": "mdi:fire"
},
"turned_off": {
"trigger": "mdi:power-off"
}
}
}

@@ -187,6 +187,13 @@
"heat_cool": "Heat/cool",
"off": "[%key:common::state::off%]"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {

@@ -285,5 +292,29 @@
"name": "[%key:common::action::turn_on%]"
}
},
"title": "Climate"
"title": "Climate",
"triggers": {
"started_heating": {
"description": "Triggers when a climate starts to heat.",
"description_configured": "[%key:component::climate::triggers::started_heating::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::triggers::turned_off::fields::behavior::description%]",
"name": "[%key:component::climate::triggers::turned_off::fields::behavior::name%]"
}
},
"name": "When a climate starts to heat"
},
"turned_off": {
"description": "Triggers when a climate is turned off.",
"description_configured": "[%key:component::climate::triggers::turned_off::description%]",
"fields": {
"behavior": {
"description": "The behavior of the targeted climates to trigger on.",
"name": "Behavior"
}
},
"name": "When a climate is turned off"
}
}
}

23
homeassistant/components/climate/trigger.py
Normal file

@@ -0,0 +1,23 @@
"""Provides triggers for climates."""

from homeassistant.const import STATE_OFF
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
    Trigger,
    make_entity_state_attribute_trigger,
    make_entity_state_trigger,
)

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
    "started_heating": make_entity_state_attribute_trigger(
        DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
    ),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for climates."""
    return TRIGGERS
19
homeassistant/components/climate/triggers.yaml
Normal file

@@ -0,0 +1,19 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: climate
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: trigger_behavior
          options:
            - first
            - last
            - any

started_heating: *trigger_common

turned_off: *trigger_common
1
homeassistant/components/cosori/__init__.py
Normal file

@@ -0,0 +1 @@
"""Virtual integration: Cosori."""
6
homeassistant/components/cosori/manifest.json
Normal file

@@ -0,0 +1,6 @@
{
  "domain": "cosori",
  "name": "Cosori",
  "integration_type": "virtual",
  "supported_by": "vesync"
}

@@ -108,5 +108,10 @@
"toggle_cover_tilt": {
"service": "mdi:arrow-top-right-bottom-left"
}
},
"triggers": {
"garage_opened": {
"trigger": "mdi:garage-open"
}
}
}

@@ -82,6 +82,15 @@
"name": "Window"
}
},
"selector": {
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
"close_cover": {
"description": "Closes a cover.",

@@ -136,5 +145,22 @@
"name": "Toggle tilt"
}
},
"title": "Cover"
"title": "Cover",
"triggers": {
"garage_opened": {
"description": "Triggers when a garage door opens.",
"description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
"fields": {
"behavior": {
"description": "The behavior of the targeted garage doors to trigger on.",
"name": "Behavior"
},
"fully_opened": {
"description": "Require the garage doors to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a garage door opens"
}
}
}

108
homeassistant/components/cover/trigger.py
Normal file

@@ -0,0 +1,108 @@
"""Provides triggers for covers."""

from typing import Final

import voluptuous as vol

from homeassistant.const import CONF_OPTIONS
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_device_class
from homeassistant.helpers.trigger import (
    ENTITY_STATE_TRIGGER_SCHEMA,
    EntityTriggerBase,
    Trigger,
    TriggerConfig,
)
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
from .const import DOMAIN

ATTR_FULLY_OPENED: Final = "fully_opened"

COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA.extend(
    {
        vol.Required(CONF_OPTIONS): {
            vol.Required(ATTR_FULLY_OPENED, default=False): bool,
        },
    }
)


def get_device_class_or_undefined(
    hass: HomeAssistant, entity_id: str
) -> str | None | UndefinedType:
    """Get the device class of an entity or UNDEFINED if not found."""
    try:
        return get_device_class(hass, entity_id)
    except HomeAssistantError:
        return UNDEFINED


class CoverOpenedClosedTrigger(EntityTriggerBase):
    """Class for cover opened and closed triggers."""

    _attribute: str = ATTR_CURRENT_POSITION
    _attribute_value: int | None = None
    _device_class: CoverDeviceClass | None
    _domain: str = DOMAIN
    _to_states: set[str]

    def is_to_state(self, state: State) -> bool:
        """Check if the state matches the target state."""
        if state.state not in self._to_states:
            return False
        if (
            self._attribute_value is not None
            and (value := state.attributes.get(self._attribute)) is not None
            and value != self._attribute_value
        ):
            return False
        return True

    def entity_filter(self, entities: set[str]) -> set[str]:
        """Filter entities of this domain."""
        entities = super().entity_filter(entities)
        return {
            entity_id
            for entity_id in entities
            if get_device_class_or_undefined(self._hass, entity_id)
            == self._device_class
        }


class CoverOpenedTrigger(CoverOpenedClosedTrigger):
    """Class for cover opened triggers."""

    _schema = COVER_OPENED_TRIGGER_SCHEMA
    _to_states = {CoverState.OPEN, CoverState.OPENING}

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)
        if self._options.get(ATTR_FULLY_OPENED):
            self._attribute_value = 100


def make_cover_opened_trigger(
    device_class: CoverDeviceClass | None,
) -> type[CoverOpenedTrigger]:
    """Create an entity state attribute trigger class."""

    class CustomTrigger(CoverOpenedTrigger):
        """Trigger for entity state changes."""

        _device_class = device_class

    return CustomTrigger


TRIGGERS: dict[str, type[Trigger]] = {
    "garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for covers."""
    return TRIGGERS
21
homeassistant/components/cover/triggers.yaml
Normal file

@@ -0,0 +1,21 @@
garage_opened:
  target:
    entity:
      domain: cover
      device_class: garage
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: trigger_behavior
          options:
            - first
            - last
            - any
    fully_opened:
      required: true
      default: false
      selector:
        boolean:
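`CoverOpenedTrigger` fires only for the `open`/`opening` states, and when the `fully_opened` option is set it additionally requires `current_position` to be exactly 100. A self-contained reproduction of that check, using a minimal stand-in for the Home Assistant `State` object:

```python
from dataclasses import dataclass, field


@dataclass
class FakeState:
    """Minimal stand-in for homeassistant.core.State."""

    state: str
    attributes: dict = field(default_factory=dict)


def is_to_state(state: FakeState, *, fully_opened: bool) -> bool:
    # Mirrors CoverOpenedTrigger.is_to_state with _attribute_value = 100.
    if state.state not in {"open", "opening"}:
        return False
    required_position = 100 if fully_opened else None
    value = state.attributes.get("current_position")
    if required_position is not None and value is not None and value != required_position:
        return False
    return True


print(is_to_state(FakeState("open", {"current_position": 40}), fully_opened=True))   # False
print(is_to_state(FakeState("open", {"current_position": 100}), fully_opened=True))  # True
print(is_to_state(FakeState("opening"), fully_opened=False))                         # True
```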
@@ -9,6 +9,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util.ssl import get_default_context

from .const import (
CONF_AUTHORIZE_STRING,

@@ -31,9 +32,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool
expires_at=entry.data[CONF_EXPIRES_AT],
)
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
ssl_context = get_default_context()

try:
cync = await Cync.create(cync_auth)
cync = await Cync.create(
auth=cync_auth,
ssl_context=ssl_context,
)
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("User token invalid") from ex
except CyncError as ex:

@@ -47,5 +47,13 @@
"turn_on": {
"service": "mdi:fan"
}
},
"triggers": {
"turned_off": {
"trigger": "mdi:fan-off"
},
"turned_on": {
"trigger": "mdi:fan"
}
}
}

@@ -66,6 +66,13 @@
"forward": "Forward",
"reverse": "Reverse"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {

@@ -152,5 +159,29 @@
"name": "[%key:common::action::turn_on%]"
}
},
"title": "Fan"
"title": "Fan",
"triggers": {
"turned_off": {
"description": "Triggers when a fan is turned off.",
"description_configured": "[%key:component::fan::triggers::turned_off::description%]",
"fields": {
"behavior": {
"description": "The behavior of the targeted fans to trigger on.",
"name": "Behavior"
}
},
"name": "When a fan is turned off"
},
"turned_on": {
"description": "Triggers when a fan is turned on.",
"description_configured": "[%key:component::fan::triggers::turned_on::description%]",
"fields": {
"behavior": {
"description": "[%key:component::fan::triggers::turned_off::fields::behavior::description%]",
"name": "[%key:component::fan::triggers::turned_off::fields::behavior::name%]"
}
},
"name": "When a fan is turned on"
}
}
}

17
homeassistant/components/fan/trigger.py
Normal file

@@ -0,0 +1,17 @@
"""Provides triggers for fans."""

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from . import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
    "turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for fans."""
    return TRIGGERS
18
homeassistant/components/fan/triggers.yaml
Normal file

@@ -0,0 +1,18 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: fan
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

turned_on: *trigger_common
turned_off: *trigger_common
@@ -778,7 +778,7 @@ class ManifestJSONView(HomeAssistantView):
{
"type": "frontend/get_icons",
vol.Required("category"): vol.In(
{"entity", "entity_component", "services", "triggers", "conditions"}
{"conditions", "entity", "entity_component", "services", "triggers"}
),
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
}

@@ -7,6 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "gold",
  "requirements": ["gassist-text==0.0.14"],
  "single_config_entry": true
}
@@ -0,0 +1,98 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: No polling.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
entity-unique-id:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: No entities to update.
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: exempt
|
||||
comment: This integration acts as a service and does not represent physical devices.
|
||||
diagnostics: done
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: No discovery.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: This is a cloud service integration that cannot be discovered locally.
|
||||
docs-data-update:
|
||||
status: exempt
|
||||
comment: No entities to update.
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: No devices.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: No entities.
|
||||
exception-translations: done
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: No repairs.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: No devices.
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The underlying library uses gRPC, not aiohttp/httpx, for communication.
|
||||
strict-typing: done
|
||||
@@ -56,6 +56,9 @@
|
||||
"init": {
|
||||
"data": {
|
||||
"language_code": "Language code"
|
||||
},
|
||||
"data_description": {
|
||||
"language_code": "Language for the Google Assistant SDK requests and responses."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,6 +31,7 @@ from .const import DOMAIN
|
||||
if TYPE_CHECKING:
|
||||
from . import GoogleSheetsConfigEntry
|
||||
|
||||
ADD_CREATED_COLUMN = "add_created_column"
|
||||
DATA = "data"
|
||||
DATA_CONFIG_ENTRY = "config_entry"
|
||||
ROWS = "rows"
|
||||
@@ -43,6 +44,7 @@ SHEET_SERVICE_SCHEMA = vol.All(
|
||||
{
|
||||
vol.Required(DATA_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),
|
||||
vol.Optional(WORKSHEET): cv.string,
|
||||
vol.Optional(ADD_CREATED_COLUMN, default=True): cv.boolean,
|
||||
vol.Required(DATA): vol.Any(cv.ensure_list, [dict]),
|
||||
},
|
||||
)
|
||||
@@ -69,10 +71,11 @@ def _append_to_sheet(call: ServiceCall, entry: GoogleSheetsConfigEntry) -> None:
|
||||
|
||||
worksheet = sheet.worksheet(call.data.get(WORKSHEET, sheet.sheet1.title))
|
||||
columns: list[str] = next(iter(worksheet.get_values("A1:ZZ1")), [])
|
||||
add_created_column = call.data[ADD_CREATED_COLUMN]
|
||||
now = str(datetime.now())
|
||||
rows = []
|
||||
for d in call.data[DATA]:
|
||||
row_data = {"created": now} | d
|
||||
row_data = ({"created": now} | d) if add_created_column else d
|
||||
row = [row_data.get(column, "") for column in columns]
|
||||
for key, value in row_data.items():
|
||||
if key not in columns:
|
||||
|
||||
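A minimal sketch of the row-merge semantics used in _append_to_sheet above, with made-up input data (illustrative only, not part of this changeset). Because the "created" default sits on the left of the dict union, a caller-supplied "created" key still wins:

# Minimal sketch of the row merge above; input values are made up.
from datetime import datetime

now = str(datetime.now())
d = {"hello": "world", "count": 5}
add_created_column = True

# The "created" default is merged on the left, so a colliding key in d overrides it.
row_data = ({"created": now} | d) if add_created_column else d
# row_data == {"created": "<timestamp>", "hello": "world", "count": 5}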
@@ -9,6 +9,11 @@ append_sheet:
|
||||
example: "Sheet1"
|
||||
selector:
|
||||
text:
|
||||
add_created_column:
|
||||
required: false
|
||||
default: true
|
||||
selector:
|
||||
boolean:
|
||||
data:
|
||||
required: true
|
||||
example: '{"hello": world, "cool": True, "count": 5}'
|
||||
|
||||
@@ -45,6 +45,10 @@
|
||||
"append_sheet": {
|
||||
"description": "Appends data to a worksheet in Google Sheets.",
|
||||
"fields": {
|
||||
"add_created_column": {
|
||||
"description": "Add a \"created\" column with the current date-time to the appended data.",
|
||||
"name": "Add created column"
|
||||
},
|
||||
"config_entry": {
|
||||
"description": "The sheet to add data to.",
|
||||
"name": "Sheet"
|
||||
|
||||
@@ -9,5 +9,5 @@
  "iot_class": "cloud_push",
  "loggers": ["aioautomower"],
  "quality_scale": "silver",
  "requirements": ["aioautomower==2.7.0"]
  "requirements": ["aioautomower==2.7.1"]
}
@@ -112,6 +112,7 @@ async def async_setup_entry(
|
||||
update_method=async_update_data,
|
||||
# Polling interval. Will only be polled if there are subscribers.
|
||||
update_interval=timedelta(hours=1),
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
# Fetch initial data so we have data when entities subscribe
|
||||
|
||||
@@ -12,6 +12,7 @@ from pyicloud.exceptions import (
|
||||
PyiCloudFailedLoginException,
|
||||
PyiCloudNoDevicesException,
|
||||
PyiCloudServiceNotActivatedException,
|
||||
PyiCloudServiceUnavailable,
|
||||
)
|
||||
from pyicloud.services.findmyiphone import AppleDevice
|
||||
|
||||
@@ -130,15 +131,21 @@ class IcloudAccount:
|
||||
except (
|
||||
PyiCloudServiceNotActivatedException,
|
||||
PyiCloudNoDevicesException,
|
||||
PyiCloudServiceUnavailable,
|
||||
) as err:
|
||||
_LOGGER.error("No iCloud device found")
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
self._owner_fullname = f"{user_info['firstName']} {user_info['lastName']}"
|
||||
if user_info is None:
|
||||
raise ConfigEntryNotReady("No user info found in iCloud devices response")
|
||||
|
||||
self._owner_fullname = (
|
||||
f"{user_info.get('firstName')} {user_info.get('lastName')}"
|
||||
)
|
||||
|
||||
self._family_members_fullname = {}
|
||||
if user_info.get("membersInfo") is not None:
|
||||
for prs_id, member in user_info["membersInfo"].items():
|
||||
for prs_id, member in user_info.get("membersInfo").items():
|
||||
self._family_members_fullname[prs_id] = (
|
||||
f"{member['firstName']} {member['lastName']}"
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/icloud",
  "iot_class": "cloud_polling",
  "loggers": ["keyrings.alt", "pyicloud"],
  "requirements": ["pyicloud==2.1.0"]
  "requirements": ["pyicloud==2.2.0"]
}
@@ -237,14 +237,23 @@ class SettingDataUpdateCoordinator(
|
||||
"""Implementation of PlenticoreUpdateCoordinator for settings data."""
|
||||
|
||||
async def _async_update_data(self) -> Mapping[str, Mapping[str, str]]:
|
||||
client = self._plenticore.client
|
||||
|
||||
if not self._fetch or client is None:
|
||||
if (client := self._plenticore.client) is None:
|
||||
return {}
|
||||
|
||||
_LOGGER.debug("Fetching %s for %s", self.name, self._fetch)
|
||||
fetch = defaultdict(set)
|
||||
|
||||
return await client.get_setting_values(self._fetch)
|
||||
for module_id, data_ids in self._fetch.items():
|
||||
fetch[module_id].update(data_ids)
|
||||
|
||||
for module_id, data_id in self.async_contexts():
|
||||
fetch[module_id].add(data_id)
|
||||
|
||||
if not fetch:
|
||||
return {}
|
||||
|
||||
_LOGGER.debug("Fetching %s for %s", self.name, fetch)
|
||||
|
||||
return await client.get_setting_values(fetch)
|
||||
|
||||
|
||||
class PlenticoreSelectUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
|
||||
|
||||
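A toy sketch of how the reworked SettingDataUpdateCoordinator builds its fetch set, combining the statically configured IDs with the (module_id, data_id) contexts registered by listening entities; the setting IDs below are illustrative:

# Toy reconstruction of the fetch aggregation; the setting IDs are illustrative.
from collections import defaultdict

static_fetch = {"devices:local": {"Battery:MinSoc"}}
entity_contexts = [("devices:local", "Generator:ShadowMgmt:Enable")]

fetch: defaultdict[str, set[str]] = defaultdict(set)
for module_id, data_ids in static_fetch.items():
    fetch[module_id].update(data_ids)
for module_id, data_id in entity_contexts:
    fetch[module_id].add(data_id)

assert fetch == {"devices:local": {"Battery:MinSoc", "Generator:ShadowMgmt:Enable"}}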
@@ -34,6 +34,29 @@ async def async_get_config_entry_diagnostics(
|
||||
},
|
||||
}
|
||||
|
||||
# Add important information about how the inverter is configured
|
||||
string_count_setting = await plenticore.client.get_setting_values(
|
||||
"devices:local", "Properties:StringCnt"
|
||||
)
|
||||
try:
|
||||
string_count = int(
|
||||
string_count_setting["devices:local"]["Properties:StringCnt"]
|
||||
)
|
||||
except ValueError:
|
||||
string_count = 0
|
||||
|
||||
configuration_settings = await plenticore.client.get_setting_values(
|
||||
"devices:local",
|
||||
(
|
||||
"Properties:StringCnt",
|
||||
*(f"Properties:String{idx}Features" for idx in range(string_count)),
|
||||
),
|
||||
)
|
||||
|
||||
data["configuration"] = {
|
||||
**configuration_settings,
|
||||
}
|
||||
|
||||
device_info = {**plenticore.device_info}
|
||||
device_info[ATTR_IDENTIFIERS] = REDACTED # contains serial number
|
||||
data["device"] = device_info
|
||||
|
||||
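A short sketch of the setting IDs the diagnostics request above generates, assuming the inverter reports a string count of 2 (the count value is an assumption):

# Sketch of the IDs built for the diagnostics request, assuming StringCnt == 2.
string_count = 2  # assumed value parsed from "Properties:StringCnt"
setting_ids = (
    "Properties:StringCnt",
    *(f"Properties:String{idx}Features" for idx in range(string_count)),
)
# setting_ids == ("Properties:StringCnt",
#                 "Properties:String0Features", "Properties:String1Features")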
@@ -5,12 +5,13 @@ from __future__ import annotations
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
from typing import Any, Final
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
@@ -66,7 +67,7 @@ async def async_setup_entry(
|
||||
"""Add kostal plenticore Switch."""
|
||||
plenticore = entry.runtime_data
|
||||
|
||||
entities = []
|
||||
entities: list[Entity] = []
|
||||
|
||||
available_settings_data = await plenticore.client.get_settings()
|
||||
settings_data_update_coordinator = SettingDataUpdateCoordinator(
|
||||
@@ -103,6 +104,57 @@ async def async_setup_entry(
|
||||
)
|
||||
)
|
||||
|
||||
# add shadow management switches for strings which support it
|
||||
string_count_setting = await plenticore.client.get_setting_values(
|
||||
"devices:local", "Properties:StringCnt"
|
||||
)
|
||||
try:
|
||||
string_count = int(
|
||||
string_count_setting["devices:local"]["Properties:StringCnt"]
|
||||
)
|
||||
except ValueError:
|
||||
string_count = 0
|
||||
|
||||
dc_strings = tuple(range(string_count))
|
||||
dc_string_feature_ids = tuple(
|
||||
PlenticoreShadowMgmtSwitch.DC_STRING_FEATURE_DATA_ID % dc_string
|
||||
for dc_string in dc_strings
|
||||
)
|
||||
|
||||
dc_string_features = await plenticore.client.get_setting_values(
|
||||
PlenticoreShadowMgmtSwitch.MODULE_ID,
|
||||
dc_string_feature_ids,
|
||||
)
|
||||
|
||||
for dc_string, dc_string_feature_id in zip(
|
||||
dc_strings, dc_string_feature_ids, strict=True
|
||||
):
|
||||
try:
|
||||
dc_string_feature = int(
|
||||
dc_string_features[PlenticoreShadowMgmtSwitch.MODULE_ID][
|
||||
dc_string_feature_id
|
||||
]
|
||||
)
|
||||
except ValueError:
|
||||
dc_string_feature = 0
|
||||
|
||||
if dc_string_feature == PlenticoreShadowMgmtSwitch.SHADOW_MANAGEMENT_SUPPORT:
|
||||
entities.append(
|
||||
PlenticoreShadowMgmtSwitch(
|
||||
settings_data_update_coordinator,
|
||||
dc_string,
|
||||
entry.entry_id,
|
||||
entry.title,
|
||||
plenticore.device_info,
|
||||
)
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Skipping shadow management for DC string %d, not supported (Feature: %d)",
|
||||
dc_string + 1,
|
||||
dc_string_feature,
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
@@ -136,7 +188,6 @@ class PlenticoreDataSwitch(
|
||||
self.off_value = description.off_value
|
||||
self.off_label = description.off_label
|
||||
self._attr_unique_id = f"{entry_id}_{description.module_id}_{description.key}"
|
||||
|
||||
self._attr_device_info = device_info
|
||||
|
||||
@property
|
||||
@@ -189,3 +240,98 @@ class PlenticoreDataSwitch(
|
||||
f"{self.platform_name} {self._name} {self.off_label}"
|
||||
)
|
||||
return bool(self.coordinator.data[self.module_id][self.data_id] == self._is_on)
|
||||
|
||||
|
||||
class PlenticoreShadowMgmtSwitch(
|
||||
CoordinatorEntity[SettingDataUpdateCoordinator], SwitchEntity
|
||||
):
|
||||
"""Representation of a Plenticore Switch for shadow management.
|
||||
|
||||
The shadow management switch can be controlled for each DC string separately. The DC string is
|
||||
coded as bit in a single settings value, bit 0 for DC string 1, bit 1 for DC string 2, etc.
|
||||
|
||||
Not all DC strings are available for shadown management, for example if one of them is used
|
||||
for a battery.
|
||||
"""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
entity_description: SwitchEntityDescription
|
||||
|
||||
MODULE_ID: Final = "devices:local"
|
||||
|
||||
SHADOW_DATA_ID: Final = "Generator:ShadowMgmt:Enable"
|
||||
"""Settings id for the bit coded shadow management."""
|
||||
|
||||
DC_STRING_FEATURE_DATA_ID: Final = "Properties:String%dFeatures"
|
||||
"""Settings id pattern for the DC string features."""
|
||||
|
||||
SHADOW_MANAGEMENT_SUPPORT: Final = 1
|
||||
"""Feature value for shadow management support in the DC string features."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: SettingDataUpdateCoordinator,
|
||||
dc_string: int,
|
||||
entry_id: str,
|
||||
platform_name: str,
|
||||
device_info: DeviceInfo,
|
||||
) -> None:
|
||||
"""Create a new Switch Entity for Plenticore shadow management."""
|
||||
super().__init__(coordinator, context=(self.MODULE_ID, self.SHADOW_DATA_ID))
|
||||
|
||||
self._mask: Final = 1 << dc_string
|
||||
|
||||
self.entity_description = SwitchEntityDescription(
|
||||
key=f"ShadowMgmt{dc_string}",
|
||||
name=f"Shadow Management DC string {dc_string + 1}",
|
||||
entity_registry_enabled_default=False,
|
||||
)
|
||||
|
||||
self.platform_name = platform_name
|
||||
self._attr_name = f"{platform_name} {self.entity_description.name}"
|
||||
self._attr_unique_id = (
|
||||
f"{entry_id}_{self.MODULE_ID}_{self.SHADOW_DATA_ID}_{dc_string}"
|
||||
)
|
||||
self._attr_device_info = device_info
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return (
|
||||
super().available
|
||||
and self.coordinator.data is not None
|
||||
and self.MODULE_ID in self.coordinator.data
|
||||
and self.SHADOW_DATA_ID in self.coordinator.data[self.MODULE_ID]
|
||||
)
|
||||
|
||||
def _get_shadow_mgmt_value(self) -> int:
|
||||
"""Return the current shadow management value for all strings as integer."""
|
||||
try:
|
||||
return int(self.coordinator.data[self.MODULE_ID][self.SHADOW_DATA_ID])
|
||||
except ValueError:
|
||||
return 0
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn shadow management on."""
|
||||
shadow_mgmt_value = self._get_shadow_mgmt_value()
|
||||
shadow_mgmt_value |= self._mask
|
||||
|
||||
if await self.coordinator.async_write_data(
|
||||
self.MODULE_ID, {self.SHADOW_DATA_ID: str(shadow_mgmt_value)}
|
||||
):
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn shadow management off."""
|
||||
shadow_mgmt_value = self._get_shadow_mgmt_value()
|
||||
shadow_mgmt_value &= ~self._mask
|
||||
|
||||
if await self.coordinator.async_write_data(
|
||||
self.MODULE_ID, {self.SHADOW_DATA_ID: str(shadow_mgmt_value)}
|
||||
):
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if shadow management is on."""
|
||||
return (self._get_shadow_mgmt_value() & self._mask) != 0
|
||||
|
||||
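A standalone illustration of the bit-coded value described in the class docstring above; the concrete numbers are made up, and the real switch reads and writes the setting through the coordinator:

# Standalone bit arithmetic for the shadow management setting; numbers are made up.
dc_string = 1            # second DC string, 0-based -> bit 1
mask = 1 << dc_string    # 0b010

value = 0b001            # currently enabled for DC string 1 only

is_on = (value & mask) != 0   # False: string 2 is off
value |= mask                 # turn string 2 on  -> 0b011
value &= ~mask                # turn string 2 off -> 0b001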
@@ -14,5 +14,19 @@
|
||||
"start_mowing": {
|
||||
"service": "mdi:play"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"docked": {
|
||||
"trigger": "mdi:home-import-outline"
|
||||
},
|
||||
"errored": {
|
||||
"trigger": "mdi:alert-circle-outline"
|
||||
},
|
||||
"paused_mowing": {
|
||||
"trigger": "mdi:pause"
|
||||
},
|
||||
"started_mowing": {
|
||||
"trigger": "mdi:play"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,15 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"dock": {
|
||||
"description": "Stops the mowing task and returns to the dock.",
|
||||
@@ -25,5 +34,51 @@
|
||||
"name": "Start mowing"
|
||||
}
|
||||
},
|
||||
"title": "Lawn mower"
|
||||
"title": "Lawn mower",
|
||||
"triggers": {
|
||||
"docked": {
|
||||
"description": "Triggers when a lawn mower has docked.",
|
||||
"description_configured": "[%key:component::lawn_mower::triggers::docked::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "The behavior of the targeted lawn mowers to trigger on.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "When a lawn mower has docked"
|
||||
},
|
||||
"errored": {
|
||||
"description": "Triggers when a lawn mower has errored.",
|
||||
"description_configured": "[%key:component::lawn_mower::triggers::errored::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::lawn_mower::triggers::docked::fields::behavior::description%]",
|
||||
"name": "[%key:component::lawn_mower::triggers::docked::fields::behavior::name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a lawn mower has errored"
|
||||
},
|
||||
"paused_mowing": {
|
||||
"description": "Triggers when a lawn mower has paused mowing.",
|
||||
"description_configured": "[%key:component::lawn_mower::triggers::paused_mowing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::lawn_mower::triggers::docked::fields::behavior::description%]",
|
||||
"name": "[%key:component::lawn_mower::triggers::docked::fields::behavior::name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a lawn mower has paused mowing"
|
||||
},
|
||||
"started_mowing": {
|
||||
"description": "Triggers when a lawn mower has started mowing.",
|
||||
"description_configured": "[%key:component::lawn_mower::triggers::started_mowing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::lawn_mower::triggers::docked::fields::behavior::description%]",
|
||||
"name": "[%key:component::lawn_mower::triggers::docked::fields::behavior::name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a lawn mower has started mowing"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/lawn_mower/trigger.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Provides triggers for lawn mowers."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from .const import DOMAIN, LawnMowerActivity

TRIGGERS: dict[str, type[Trigger]] = {
    "docked": make_entity_state_trigger(DOMAIN, LawnMowerActivity.DOCKED),
    "errored": make_entity_state_trigger(DOMAIN, LawnMowerActivity.ERROR),
    "paused_mowing": make_entity_state_trigger(DOMAIN, LawnMowerActivity.PAUSED),
    "started_mowing": make_entity_state_trigger(DOMAIN, LawnMowerActivity.MOWING),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for lawn mowers."""
    return TRIGGERS
homeassistant/components/lawn_mower/triggers.yaml (new file, 20 lines)
@@ -0,0 +1,20 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: lawn_mower
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

docked: *trigger_common
errored: *trigger_common
paused_mowing: *trigger_common
started_mowing: *trigger_common
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
import pypck
|
||||
from pypck.connection import (
|
||||
@@ -48,7 +49,6 @@ from .const import (
|
||||
)
|
||||
from .helpers import (
|
||||
AddressType,
|
||||
InputType,
|
||||
LcnConfigEntry,
|
||||
LcnRuntimeData,
|
||||
async_update_config_entry,
|
||||
@@ -285,7 +285,7 @@ def _async_fire_access_control_event(
|
||||
hass: HomeAssistant,
|
||||
device: dr.DeviceEntry | None,
|
||||
address: AddressType,
|
||||
inp: InputType,
|
||||
inp: pypck.inputs.ModStatusAccessControl,
|
||||
) -> None:
|
||||
"""Fire access control event (transponder, transmitter, fingerprint, codelock)."""
|
||||
event_data = {
|
||||
@@ -299,7 +299,11 @@ def _async_fire_access_control_event(
|
||||
|
||||
if inp.periphery == pypck.lcn_defs.AccessControlPeriphery.TRANSMITTER:
|
||||
event_data.update(
|
||||
{"level": inp.level, "key": inp.key, "action": inp.action.value}
|
||||
{
|
||||
"level": inp.level,
|
||||
"key": inp.key,
|
||||
"action": cast(pypck.lcn_defs.KeyAction, inp.action).value,
|
||||
}
|
||||
)
|
||||
|
||||
event_name = f"lcn_{inp.periphery.value.lower()}"
|
||||
@@ -310,7 +314,7 @@ def _async_fire_send_keys_event(
|
||||
hass: HomeAssistant,
|
||||
device: dr.DeviceEntry | None,
|
||||
address: AddressType,
|
||||
inp: InputType,
|
||||
inp: pypck.inputs.ModSendKeysHost,
|
||||
) -> None:
|
||||
"""Fire send_keys event."""
|
||||
for table, action in enumerate(inp.actions):
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for LCN binary sensors."""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
|
||||
import pypck
|
||||
@@ -19,6 +20,7 @@ from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
@@ -69,21 +71,11 @@ class LcnBinarySensor(LcnEntity, BinarySensorEntity):
|
||||
config[CONF_DOMAIN_DATA][CONF_SOURCE]
|
||||
]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(
|
||||
self.bin_sensor_port
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(
|
||||
self.bin_sensor_port
|
||||
)
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_binary_sensors(
|
||||
SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set sensor value when LCN input object (command) is received."""
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
"""Support for LCN climate control."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any, cast
|
||||
|
||||
@@ -36,6 +38,7 @@ from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
@@ -97,8 +100,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
self._max_temp = config[CONF_DOMAIN_DATA][CONF_MAX_TEMP]
|
||||
self._min_temp = config[CONF_DOMAIN_DATA][CONF_MIN_TEMP]
|
||||
|
||||
self._current_temperature = None
|
||||
self._target_temperature = None
|
||||
self._is_on = True
|
||||
|
||||
self._attr_hvac_modes = [HVACMode.HEAT]
|
||||
@@ -110,20 +111,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.variable)
|
||||
await self.device_connection.activate_status_request_handler(self.setpoint)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.variable)
|
||||
await self.device_connection.cancel_status_request_handler(self.setpoint)
|
||||
|
||||
@property
|
||||
def temperature_unit(self) -> str:
|
||||
"""Return the unit of measurement."""
|
||||
@@ -132,16 +119,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
return UnitOfTemperature.FAHRENHEIT
|
||||
return UnitOfTemperature.CELSIUS
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._current_temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
return self._target_temperature
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool mode.
|
||||
@@ -177,7 +154,7 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
):
|
||||
return
|
||||
self._is_on = False
|
||||
self._target_temperature = None
|
||||
self._attr_target_temperature = None
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
@@ -189,19 +166,34 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
self.setpoint, temperature, self.unit
|
||||
):
|
||||
return
|
||||
self._target_temperature = temperature
|
||||
self._attr_target_temperature = temperature
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await asyncio.gather(
|
||||
self.device_connection.request_status_variable(
|
||||
self.variable, SCAN_INTERVAL.seconds
|
||||
),
|
||||
self.device_connection.request_status_variable(
|
||||
self.setpoint, SCAN_INTERVAL.seconds
|
||||
),
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set temperature value when LCN input object is received."""
|
||||
if not isinstance(input_obj, pypck.inputs.ModStatusVar):
|
||||
return
|
||||
|
||||
if input_obj.get_var() == self.variable:
|
||||
self._current_temperature = input_obj.get_value().to_var_unit(self.unit)
|
||||
self._attr_current_temperature = float(
|
||||
input_obj.get_value().to_var_unit(self.unit)
|
||||
)
|
||||
elif input_obj.get_var() == self.setpoint:
|
||||
self._is_on = not input_obj.get_value().is_locked_regulator()
|
||||
if self._is_on:
|
||||
self._target_temperature = input_obj.get_value().to_var_unit(self.unit)
|
||||
self._attr_target_temperature = float(
|
||||
input_obj.get_value().to_var_unit(self.unit)
|
||||
)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -120,7 +120,7 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
errors={CONF_BASE: error},
|
||||
)
|
||||
|
||||
data: dict = {
|
||||
data: dict[str, Any] = {
|
||||
**user_input,
|
||||
CONF_DEVICES: [],
|
||||
CONF_ENTITIES: [],
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
"""Support for LCN covers."""
|
||||
|
||||
from collections.abc import Iterable
|
||||
import asyncio
|
||||
from collections.abc import Coroutine, Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
@@ -27,6 +29,7 @@ from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
@@ -73,11 +76,13 @@ async def async_setup_entry(
|
||||
class LcnOutputsCover(LcnEntity, CoverEntity):
|
||||
"""Representation of a LCN cover connected to output ports."""
|
||||
|
||||
_attr_is_closed = False
|
||||
_attr_is_closed = True
|
||||
_attr_is_closing = False
|
||||
_attr_is_opening = False
|
||||
_attr_assumed_state = True
|
||||
|
||||
reverse_time: pypck.lcn_defs.MotorReverseTime | None
|
||||
|
||||
def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
|
||||
"""Initialize the LCN cover."""
|
||||
super().__init__(config, config_entry)
|
||||
@@ -93,28 +98,6 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
|
||||
else:
|
||||
self.reverse_time = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTUP"]
|
||||
)
|
||||
await self.device_connection.activate_status_request_handler(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTDOWN"]
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTUP"]
|
||||
)
|
||||
await self.device_connection.cancel_status_request_handler(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTDOWN"]
|
||||
)
|
||||
|
||||
async def async_close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close the cover."""
|
||||
state = pypck.lcn_defs.MotorStateModifier.DOWN
|
||||
@@ -147,6 +130,18 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
|
||||
self._attr_is_opening = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
if not self.device_connection.is_group:
|
||||
await asyncio.gather(
|
||||
self.device_connection.request_status_output(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTUP"], SCAN_INTERVAL.seconds
|
||||
),
|
||||
self.device_connection.request_status_output(
|
||||
pypck.lcn_defs.OutputPort["OUTPUTDOWN"], SCAN_INTERVAL.seconds
|
||||
),
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set cover states when LCN input object (command) is received."""
|
||||
if (
|
||||
@@ -175,7 +170,7 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
|
||||
class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
"""Representation of a LCN cover connected to relays."""
|
||||
|
||||
_attr_is_closed = False
|
||||
_attr_is_closed = True
|
||||
_attr_is_closing = False
|
||||
_attr_is_opening = False
|
||||
_attr_assumed_state = True
|
||||
@@ -206,20 +201,6 @@ class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
self._is_closing = False
|
||||
self._is_opening = False
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(
|
||||
self.motor, self.positioning_mode
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.motor)
|
||||
|
||||
async def async_close_cover(self, **kwargs: Any) -> None:
|
||||
"""Close the cover."""
|
||||
if not await self.device_connection.control_motor_relays(
|
||||
@@ -274,6 +255,25 @@ class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
coros: list[
|
||||
Coroutine[
|
||||
Any,
|
||||
Any,
|
||||
pypck.inputs.ModStatusRelays
|
||||
| pypck.inputs.ModStatusMotorPositionBS4
|
||||
| None,
|
||||
]
|
||||
] = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
|
||||
if self.positioning_mode == pypck.lcn_defs.MotorPositioningMode.BS4:
|
||||
coros.append(
|
||||
self.device_connection.request_status_motor_position(
|
||||
self.motor, self.positioning_mode, SCAN_INTERVAL.seconds
|
||||
)
|
||||
)
|
||||
await asyncio.gather(*coros)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set cover states when LCN input object (command) is received."""
|
||||
if isinstance(input_obj, pypck.inputs.ModStatusRelays):
|
||||
@@ -293,7 +293,7 @@ class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
)
|
||||
and input_obj.motor == self.motor.value
|
||||
):
|
||||
self._attr_current_cover_position = input_obj.position
|
||||
self._attr_current_cover_position = int(input_obj.position)
|
||||
if self._attr_current_cover_position in [0, 100]:
|
||||
self._attr_is_opening = False
|
||||
self._attr_is_closing = False
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from pypck.device import DeviceConnection
|
||||
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_NAME
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -10,7 +12,6 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from .const import CONF_DOMAIN_DATA, DOMAIN
|
||||
from .helpers import (
|
||||
AddressType,
|
||||
DeviceConnectionType,
|
||||
InputType,
|
||||
LcnConfigEntry,
|
||||
generate_unique_id,
|
||||
@@ -22,9 +23,8 @@ from .helpers import (
|
||||
class LcnEntity(Entity):
|
||||
"""Parent class for all entities associated with the LCN component."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_has_entity_name = True
|
||||
device_connection: DeviceConnectionType
|
||||
device_connection: DeviceConnection
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -35,7 +35,7 @@ class LcnEntity(Entity):
|
||||
self.config = config
|
||||
self.config_entry = config_entry
|
||||
self.address: AddressType = config[CONF_ADDRESS]
|
||||
self._unregister_for_inputs: Callable | None = None
|
||||
self._unregister_for_inputs: Callable[[], None] | None = None
|
||||
self._name: str = config[CONF_NAME]
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
@@ -57,15 +57,24 @@ class LcnEntity(Entity):
|
||||
).lower(),
|
||||
)
|
||||
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Groups may not poll for a status."""
|
||||
return not self.device_connection.is_group
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
self.device_connection = get_device_connection(
|
||||
self.hass, self.config[CONF_ADDRESS], self.config_entry
|
||||
)
|
||||
if not self.device_connection.is_group:
|
||||
self._unregister_for_inputs = self.device_connection.register_for_inputs(
|
||||
self.input_received
|
||||
)
|
||||
if self.device_connection.is_group:
|
||||
return
|
||||
|
||||
self._unregister_for_inputs = self.device_connection.register_for_inputs(
|
||||
self.input_received
|
||||
)
|
||||
|
||||
self.schedule_update_ha_state(force_refresh=True)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
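A condensed, assumption-laden sketch of the polling pattern the LCN platforms now follow (should_poll plus async_update with SCAN_INTERVAL, as added throughout this diff); it is not a complete entity:

# Condensed sketch of the push-to-poll change: modules poll, groups do not.
from datetime import timedelta

SCAN_INTERVAL = timedelta(minutes=1)


class ExamplePolledLcnEntity(LcnEntity):  # LcnEntity as shown in entity.py above
    @property
    def should_poll(self) -> bool:
        """Groups may not poll for a status."""
        return not self.device_connection.is_group

    async def async_update(self) -> None:
        """Request a status update; the reply arrives via input_received()."""
        await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)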
@@ -11,6 +11,7 @@ from typing import cast
|
||||
|
||||
import pypck
|
||||
from pypck.connection import PchkConnectionManager
|
||||
from pypck.device import DeviceConnection
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@@ -48,7 +49,7 @@ class LcnRuntimeData:
|
||||
connection: PchkConnectionManager
|
||||
"""Connection to PCHK host."""
|
||||
|
||||
device_connections: dict[str, DeviceConnectionType]
|
||||
device_connections: dict[str, DeviceConnection]
|
||||
"""Logical addresses of devices connected to the host."""
|
||||
|
||||
add_entities_callbacks: dict[str, Callable[[Iterable[ConfigType]], None]]
|
||||
@@ -59,9 +60,8 @@ class LcnRuntimeData:
|
||||
type LcnConfigEntry = ConfigEntry[LcnRuntimeData]
|
||||
|
||||
type AddressType = tuple[int, int, bool]
|
||||
type DeviceConnectionType = pypck.module.ModuleConnection | pypck.module.GroupConnection
|
||||
|
||||
type InputType = type[pypck.inputs.Input]
|
||||
type InputType = pypck.inputs.Input
|
||||
|
||||
# Regex for address validation
|
||||
PATTERN_ADDRESS = re.compile(
|
||||
@@ -82,11 +82,11 @@ DOMAIN_LOOKUP = {
|
||||
|
||||
def get_device_connection(
|
||||
hass: HomeAssistant, address: AddressType, config_entry: LcnConfigEntry
|
||||
) -> DeviceConnectionType:
|
||||
) -> DeviceConnection:
|
||||
"""Return a lcn device_connection."""
|
||||
host_connection = config_entry.runtime_data.connection
|
||||
addr = pypck.lcn_addr.LcnAddr(*address)
|
||||
return host_connection.get_address_conn(addr)
|
||||
return host_connection.get_device_connection(addr)
|
||||
|
||||
|
||||
def get_resource(domain_name: str, domain_data: ConfigType) -> str:
|
||||
@@ -246,27 +246,33 @@ def register_lcn_address_devices(
|
||||
|
||||
|
||||
async def async_update_device_config(
|
||||
device_connection: DeviceConnectionType, device_config: ConfigType
|
||||
device_connection: DeviceConnection, device_config: ConfigType
|
||||
) -> None:
|
||||
"""Fill missing values in device_config with infos from LCN bus."""
|
||||
# fetch serial info if device is module
|
||||
if not (is_group := device_config[CONF_ADDRESS][2]): # is module
|
||||
await device_connection.serial_known
|
||||
await device_connection.serials_known()
|
||||
if device_config[CONF_HARDWARE_SERIAL] == -1:
|
||||
device_config[CONF_HARDWARE_SERIAL] = device_connection.hardware_serial
|
||||
device_config[CONF_HARDWARE_SERIAL] = (
|
||||
device_connection.serials.hardware_serial
|
||||
)
|
||||
if device_config[CONF_SOFTWARE_SERIAL] == -1:
|
||||
device_config[CONF_SOFTWARE_SERIAL] = device_connection.software_serial
|
||||
device_config[CONF_SOFTWARE_SERIAL] = (
|
||||
device_connection.serials.software_serial
|
||||
)
|
||||
if device_config[CONF_HARDWARE_TYPE] == -1:
|
||||
device_config[CONF_HARDWARE_TYPE] = device_connection.hardware_type.value
|
||||
device_config[CONF_HARDWARE_TYPE] = (
|
||||
device_connection.serials.hardware_type.value
|
||||
)
|
||||
|
||||
# fetch name if device is module
|
||||
if device_config[CONF_NAME] != "":
|
||||
return
|
||||
|
||||
device_name = ""
|
||||
device_name: str | None = None
|
||||
if not is_group:
|
||||
device_name = await device_connection.request_name()
|
||||
if is_group or device_name == "":
|
||||
if is_group or device_name is None:
|
||||
module_type = "Group" if is_group else "Module"
|
||||
device_name = (
|
||||
f"{module_type} "
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for LCN lights."""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
@@ -33,6 +34,7 @@ from .helpers import InputType, LcnConfigEntry
|
||||
BRIGHTNESS_SCALE = (1, 100)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_entities(
|
||||
@@ -100,18 +102,6 @@ class LcnOutputLight(LcnEntity, LightEntity):
|
||||
self._attr_color_mode = ColorMode.ONOFF
|
||||
self._attr_supported_color_modes = {self._attr_color_mode}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.output)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.output)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
if ATTR_TRANSITION in kwargs:
|
||||
@@ -157,6 +147,12 @@ class LcnOutputLight(LcnEntity, LightEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_output(
|
||||
self.output, SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set light state when LCN input object (command) is received."""
|
||||
if (
|
||||
@@ -184,18 +180,6 @@ class LcnRelayLight(LcnEntity, LightEntity):
|
||||
|
||||
self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.output)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.output)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8
|
||||
@@ -214,6 +198,10 @@ class LcnRelayLight(LcnEntity, LightEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set light state when LCN input object (command) is received."""
|
||||
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
|
||||
|
||||
@@ -6,8 +6,8 @@
  "config_flow": true,
  "dependencies": ["http", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/lcn",
  "iot_class": "local_push",
  "iot_class": "local_polling",
  "loggers": ["pypck"],
  "quality_scale": "bronze",
  "requirements": ["pypck==0.8.12", "lcn-frontend==0.2.7"]
  "requirements": ["pypck==0.9.4", "lcn-frontend==0.2.7"]
}
@@ -74,4 +74,4 @@ rules:
    status: exempt
    comment: |
      Integration is not making any HTTP requests.
  strict-typing: todo
  strict-typing: done
@@ -1,6 +1,7 @@
|
||||
"""Support for LCN sensors."""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
|
||||
@@ -40,6 +41,8 @@ from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
DEVICE_CLASS_MAPPING = {
|
||||
pypck.lcn_defs.VarUnit.CELSIUS: SensorDeviceClass.TEMPERATURE,
|
||||
@@ -128,17 +131,11 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
|
||||
)
|
||||
self._attr_device_class = DEVICE_CLASS_MAPPING.get(self.unit)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.variable)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.variable)
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_variable(
|
||||
self.variable, SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set sensor value when LCN input object (command) is received."""
|
||||
@@ -159,6 +156,8 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
|
||||
class LcnLedLogicSensor(LcnEntity, SensorEntity):
|
||||
"""Representation of a LCN sensor for leds and logicops."""
|
||||
|
||||
source: pypck.lcn_defs.LedPort | pypck.lcn_defs.LogicOpPort
|
||||
|
||||
def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
|
||||
"""Initialize the LCN sensor."""
|
||||
super().__init__(config, config_entry)
|
||||
@@ -170,17 +169,11 @@ class LcnLedLogicSensor(LcnEntity, SensorEntity):
|
||||
config[CONF_DOMAIN_DATA][CONF_SOURCE]
|
||||
]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.source)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.source)
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_led_and_logic_ops(
|
||||
SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set sensor value when LCN input object (command) is received."""
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from enum import StrEnum, auto
|
||||
|
||||
import pypck
|
||||
from pypck.device import DeviceConnection
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
@@ -48,7 +49,7 @@ from .const import (
|
||||
VAR_UNITS,
|
||||
VARIABLES,
|
||||
)
|
||||
from .helpers import DeviceConnectionType, LcnConfigEntry, is_states_string
|
||||
from .helpers import LcnConfigEntry, is_states_string
|
||||
|
||||
|
||||
class LcnServiceCall:
|
||||
@@ -65,7 +66,7 @@ class LcnServiceCall:
|
||||
"""Initialize service call."""
|
||||
self.hass = hass
|
||||
|
||||
def get_device_connection(self, service: ServiceCall) -> DeviceConnectionType:
|
||||
def get_device_connection(self, service: ServiceCall) -> DeviceConnection:
|
||||
"""Get address connection object."""
|
||||
entries: list[LcnConfigEntry] = self.hass.config_entries.async_loaded_entries(
|
||||
DOMAIN
|
||||
@@ -380,9 +381,6 @@ class LockKeys(LcnServiceCall):
|
||||
else:
|
||||
await device_connection.lock_keys(table_id, states)
|
||||
|
||||
handler = device_connection.status_requests_handler
|
||||
await handler.request_status_locked_keys_timeout()
|
||||
|
||||
|
||||
class DynText(LcnServiceCall):
|
||||
"""Send dynamic text to LCN-GTxD displays."""
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for LCN switches."""
|
||||
|
||||
from collections.abc import Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
|
||||
@@ -17,6 +18,7 @@ from .entity import LcnEntity
|
||||
from .helpers import InputType, LcnConfigEntry
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
def add_lcn_switch_entities(
|
||||
@@ -77,18 +79,6 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):
|
||||
|
||||
self.output = pypck.lcn_defs.OutputPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.output)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.output)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
if not await self.device_connection.dim_output(self.output.value, 100, 0):
|
||||
@@ -103,6 +93,12 @@ class LcnOutputSwitch(LcnEntity, SwitchEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_output(
|
||||
self.output, SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set switch state when LCN input object (command) is received."""
|
||||
if (
|
||||
@@ -126,18 +122,6 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity):
|
||||
|
||||
self.output = pypck.lcn_defs.RelayPort[config[CONF_DOMAIN_DATA][CONF_OUTPUT]]
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.output)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.output)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
states = [pypck.lcn_defs.RelayStateModifier.NOCHANGE] * 8
|
||||
@@ -156,6 +140,10 @@ class LcnRelaySwitch(LcnEntity, SwitchEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set switch state when LCN input object (command) is received."""
|
||||
if not isinstance(input_obj, pypck.inputs.ModStatusRelays):
|
||||
@@ -179,22 +167,6 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):
|
||||
]
|
||||
self.reg_id = pypck.lcn_defs.Var.to_set_point_id(self.setpoint_variable)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(
|
||||
self.setpoint_variable
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(
|
||||
self.setpoint_variable
|
||||
)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
if not await self.device_connection.lock_regulator(self.reg_id, True):
|
||||
@@ -209,6 +181,12 @@ class LcnRegulatorLockSwitch(LcnEntity, SwitchEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_variable(
|
||||
self.setpoint_variable, SCAN_INTERVAL.seconds
|
||||
)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set switch state when LCN input object (command) is received."""
|
||||
if (
|
||||
@@ -234,18 +212,6 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):
|
||||
self.table_id = ord(self.key.name[0]) - 65
|
||||
self.key_id = int(self.key.name[1]) - 1
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.activate_status_request_handler(self.key)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
await super().async_will_remove_from_hass()
|
||||
if not self.device_connection.is_group:
|
||||
await self.device_connection.cancel_status_request_handler(self.key)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the entity on."""
|
||||
states = [pypck.lcn_defs.KeyLockStateModifier.NOCHANGE] * 8
|
||||
@@ -268,6 +234,10 @@ class LcnKeyLockSwitch(LcnEntity, SwitchEntity):
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
await self.device_connection.request_status_locked_keys(SCAN_INTERVAL.seconds)
|
||||
|
||||
def input_received(self, input_obj: InputType) -> None:
|
||||
"""Set switch state when LCN input object (command) is received."""
|
||||
if (
|
||||
|
||||
@@ -7,6 +7,7 @@ from functools import wraps
|
||||
from typing import Any, Final
|
||||
|
||||
import lcn_frontend as lcn_panel
|
||||
from pypck.device import DeviceConnection
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import panel_custom, websocket_api
|
||||
@@ -37,7 +38,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
)
|
||||
from .helpers import (
|
||||
DeviceConnectionType,
|
||||
LcnConfigEntry,
|
||||
async_update_device_config,
|
||||
generate_unique_id,
|
||||
@@ -104,7 +104,9 @@ def get_config_entry(
|
||||
|
||||
@wraps(func)
|
||||
async def get_entry(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get config_entry."""
|
||||
if not (config_entry := hass.config_entries.async_get_entry(msg["entry_id"])):
|
||||
@@ -124,7 +126,7 @@ def get_config_entry(
|
||||
async def websocket_get_device_configs(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Get device configs."""
|
||||
@@ -144,7 +146,7 @@ async def websocket_get_device_configs(
|
||||
async def websocket_get_entity_configs(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Get entities configs."""
|
||||
@@ -175,14 +177,14 @@ async def websocket_get_entity_configs(
|
||||
async def websocket_scan_devices(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Scan for new devices."""
|
||||
host_connection = config_entry.runtime_data.connection
|
||||
await host_connection.scan_modules()
|
||||
|
||||
for device_connection in host_connection.address_conns.values():
|
||||
for device_connection in host_connection.device_connections.values():
|
||||
if not device_connection.is_group:
|
||||
await async_create_or_update_device_in_config_entry(
|
||||
hass, device_connection, config_entry
|
||||
@@ -207,7 +209,7 @@ async def websocket_scan_devices(
|
||||
async def websocket_add_device(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Add a device."""
|
||||
@@ -253,7 +255,7 @@ async def websocket_add_device(
|
||||
async def websocket_delete_device(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Delete a device."""
|
||||
@@ -315,7 +317,7 @@ async def websocket_delete_device(
|
||||
async def websocket_add_entity(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Add an entity."""
|
||||
@@ -381,7 +383,7 @@ async def websocket_add_entity(
|
||||
async def websocket_delete_entity(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Delete an entity."""
|
||||
@@ -421,7 +423,7 @@ async def websocket_delete_entity(
|
||||
|
||||
async def async_create_or_update_device_in_config_entry(
|
||||
hass: HomeAssistant,
|
||||
device_connection: DeviceConnectionType,
|
||||
device_connection: DeviceConnection,
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Create or update device in config_entry according to given device_connection."""
|
||||
@@ -451,7 +453,7 @@ async def async_create_or_update_device_in_config_entry(
|
||||
|
||||
|
||||
def get_entity_entry(
|
||||
hass: HomeAssistant, entity_config: dict, config_entry: LcnConfigEntry
|
||||
hass: HomeAssistant, entity_config: dict[str, Any], config_entry: LcnConfigEntry
|
||||
) -> er.RegistryEntry | None:
|
||||
"""Get entity RegistryEntry from entity_config."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
homeassistant/components/light/condition.py (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
"""Provides conditions for lights."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from typing import TYPE_CHECKING, Any, Final, override
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS, CONF_TARGET, STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant, split_entity_id
|
||||
from homeassistant.helpers import config_validation as cv, target
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
trace_condition_function,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_BEHAVIOR: Final = "behavior"
|
||||
BEHAVIOR_ANY: Final = "any"
|
||||
BEHAVIOR_ALL: Final = "all"
|
||||
|
||||
|
||||
STATE_CONDITION_VALID_STATES: Final = [STATE_ON, STATE_OFF]
|
||||
STATE_CONDITION_OPTIONS_SCHEMA: dict[vol.Marker, Any] = {
|
||||
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
|
||||
[BEHAVIOR_ANY, BEHAVIOR_ALL]
|
||||
),
|
||||
}
|
||||
STATE_CONDITION_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
|
||||
vol.Required(CONF_OPTIONS): STATE_CONDITION_OPTIONS_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class StateConditionBase(Condition):
|
||||
"""State condition."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return STATE_CONDITION_SCHEMA(config) # type: ignore[no-any-return]
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config: ConditionConfig, state: str
|
||||
) -> None:
|
||||
"""Initialize condition."""
|
||||
self._hass = hass
|
||||
if TYPE_CHECKING:
|
||||
assert config.target
|
||||
assert config.options
|
||||
self._target = config.target
|
||||
self._behavior = config.options[ATTR_BEHAVIOR]
|
||||
self._state = state
|
||||
|
||||
@override
|
||||
async def async_get_checker(self) -> ConditionCheckerType:
|
||||
"""Get the condition checker."""
|
||||
|
||||
def check_any_match_state(states: list[str]) -> bool:
|
||||
"""Test if any entity match the state."""
|
||||
return any(state == self._state for state in states)
|
||||
|
||||
def check_all_match_state(states: list[str]) -> bool:
|
||||
"""Test if all entities match the state."""
|
||||
return all(state == self._state for state in states)
|
||||
|
||||
matcher: Callable[[list[str]], bool]
|
||||
if self._behavior == BEHAVIOR_ANY:
|
||||
matcher = check_any_match_state
|
||||
elif self._behavior == BEHAVIOR_ALL:
|
||||
matcher = check_all_match_state
|
||||
|
||||
@trace_condition_function
|
||||
def test_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
|
||||
"""Test state condition."""
|
||||
selector_data = target.TargetSelectorData(self._target)
|
||||
targeted_entities = target.async_extract_referenced_entity_ids(
|
||||
hass, selector_data, expand_group=False
|
||||
)
|
||||
referenced_entity_ids = targeted_entities.referenced.union(
|
||||
targeted_entities.indirectly_referenced
|
||||
)
|
||||
light_entity_ids = {
|
||||
entity_id
|
||||
for entity_id in referenced_entity_ids
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
light_entity_states = [
|
||||
state.state
|
||||
for entity_id in light_entity_ids
|
||||
if (state := hass.states.get(entity_id))
|
||||
and state.state in STATE_CONDITION_VALID_STATES
|
||||
]
|
||||
return matcher(light_entity_states)
|
||||
|
||||
return test_state
|
||||
|
||||
|
||||
class IsOnCondition(StateConditionBase):
|
||||
"""Is on condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
super().__init__(hass, config, STATE_ON)
|
||||
|
||||
|
||||
class IsOffCondition(StateConditionBase):
|
||||
"""Is off condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
super().__init__(hass, config, STATE_OFF)
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_off": IsOffCondition,
|
||||
"is_on": IsOnCondition,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the light conditions."""
|
||||
return CONDITIONS
|
||||
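The behavior option controls whether the condition passes when any targeted light matches the wanted state or only when all of them do. A minimal standalone sketch of that matcher logic (plain Python, names illustrative, independent of the Home Assistant helpers used above):

# Illustrative any/all matching as used by the light state conditions.
# `states` stands in for the states of the targeted light entities ("on"/"off").
def condition_passes(states: list[str], wanted: str, behavior: str) -> bool:
    if behavior == "all":
        return all(state == wanted for state in states)
    # default behavior: "any"
    return any(state == wanted for state in states)

assert condition_passes(["on", "off"], "on", "any") is True
assert condition_passes(["on", "off"], "on", "all") is False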
homeassistant/components/light/conditions.yaml (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
is_off:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
is_on:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
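For orientation, a config in the shape below (written as a Python dict; the entity id is made up) would satisfy the STATE_CONDITION_SCHEMA defined in condition.py above, with "target" validated by cv.TARGET_FIELDS and "options" carrying the behavior field:

# Hypothetical condition config accepted by STATE_CONDITION_SCHEMA.
example_condition_config = {
    "target": {"entity_id": "light.kitchen"},  # made-up entity id
    "options": {"behavior": "all"},            # "any" (default) or "all"
}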
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"condition": "mdi:lightbulb-off"
|
||||
},
|
||||
"is_on": {
|
||||
"condition": "mdi:lightbulb-on"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:lightbulb",
|
||||
@@ -25,5 +33,13 @@
|
||||
"turn_on": {
|
||||
"service": "mdi:lightbulb-on"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"trigger": "mdi:lightbulb-off"
|
||||
},
|
||||
"turned_on": {
|
||||
"trigger": "mdi:lightbulb-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,6 +36,30 @@
|
||||
"field_xy_color_name": "XY-color",
|
||||
"section_advanced_fields_name": "Advanced options"
|
||||
},
|
||||
"conditions": {
|
||||
"is_off": {
|
||||
"description": "Test if a light is off.",
|
||||
"description_configured": "[%key:component::light::conditions::is_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "How the state should match on the targeted lights.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "If a light is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Test if a light is on.",
|
||||
"description_configured": "[%key:component::light::conditions::is_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "How the state should match on the targeted lights.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "If a light is on"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"brightness_decrease": "Decrease {entity_name} brightness",
|
||||
@@ -284,11 +308,30 @@
|
||||
"yellowgreen": "Yellow green"
|
||||
}
|
||||
},
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"flash": {
|
||||
"options": {
|
||||
"long": "Long",
|
||||
"short": "Short"
|
||||
}
|
||||
},
|
||||
"state": {
|
||||
"options": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -462,5 +505,29 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Light"
|
||||
"title": "Light",
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"description": "Triggers when a light is turned off.",
|
||||
"description_configured": "[%key:component::light::triggers::turned_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "The behavior of the targeted lights to trigger on.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "When a light is turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers when a light is turned on.",
|
||||
"description_configured": "[%key:component::light::triggers::turned_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::light::triggers::turned_off::fields::behavior::description%]",
|
||||
"name": "[%key:component::light::triggers::turned_off::fields::behavior::name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a light is turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/light/trigger.py (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
"""Provides triggers for lights."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for lights."""
|
||||
return TRIGGERS
|
||||
homeassistant/components/light/triggers.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
turned_on: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
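triggers.yaml uses a YAML anchor so turned_on and turned_off share one field definition. A small PyYAML sketch (illustrative only, not part of the integration) shows that both aliases expand to the same mapping:

# Demonstrates how the &trigger_common anchor is reused by both triggers.
import yaml  # PyYAML

TRIGGERS_YAML = """
.trigger_common: &trigger_common
  target:
    entity:
      domain: light
  fields:
    behavior:
      required: true
      default: any

turned_on: *trigger_common
turned_off: *trigger_common
"""

parsed = yaml.safe_load(TRIGGERS_YAML)
assert parsed["turned_on"] == parsed["turned_off"]  # both expand to the shared mapping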
@@ -104,5 +104,10 @@
|
||||
"volume_up": {
|
||||
"service": "mdi:volume-plus"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"stopped_playing": {
|
||||
"trigger": "mdi:stop"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,6 +177,13 @@
|
||||
"off": "[%key:common::state::off%]",
|
||||
"one": "Repeat one"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -367,5 +374,18 @@
|
||||
"name": "Turn up volume"
|
||||
}
|
||||
},
|
||||
"title": "Media player"
|
||||
"title": "Media player",
|
||||
"triggers": {
|
||||
"stopped_playing": {
|
||||
"description": "Triggers when a media player stops playing.",
|
||||
"description_configured": "[%key:component::media_player::triggers::stopped_playing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "The behavior of the targeted media players to trigger on.",
|
||||
"name": "Behavior"
|
||||
}
|
||||
},
|
||||
"name": "When a media player stops playing"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/media_player/trigger.py (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
"""Provides triggers for media players."""
|
||||
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
|
||||
|
||||
from . import MediaPlayerState
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class MediaPlayerStoppedPlayingTrigger(EntityTriggerBase):
|
||||
"""Class for media player stopped playing trigger."""
|
||||
|
||||
_domain: str = DOMAIN
|
||||
_from_states = {
|
||||
MediaPlayerState.BUFFERING,
|
||||
MediaPlayerState.PAUSED,
|
||||
MediaPlayerState.PLAYING,
|
||||
}
|
||||
_to_states = {
|
||||
MediaPlayerState.IDLE,
|
||||
MediaPlayerState.OFF,
|
||||
MediaPlayerState.ON,
|
||||
}
|
||||
|
||||
def is_from_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the origin state."""
|
||||
return state.state in self._from_states
|
||||
|
||||
def is_to_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the target state."""
|
||||
return state.state in self._to_states
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"stopped_playing": MediaPlayerStoppedPlayingTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for media players."""
|
||||
return TRIGGERS
|
||||
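The stopped_playing trigger fires on a transition out of an active playback state into an idle/off/on state. A standalone sketch of that transition check (plain strings instead of MediaPlayerState, for illustration):

# Mirrors the from/to state sets of MediaPlayerStoppedPlayingTrigger.
ACTIVE_STATES = {"buffering", "paused", "playing"}
STOPPED_STATES = {"idle", "off", "on"}

def stopped_playing(old_state: str, new_state: str) -> bool:
    return old_state in ACTIVE_STATES and new_state in STOPPED_STATES

assert stopped_playing("playing", "idle") is True
assert stopped_playing("paused", "playing") is False  # still playing, no trigger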
homeassistant/components/media_player/triggers.yaml (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
stopped_playing:
|
||||
target:
|
||||
entity:
|
||||
domain: media_player
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
@@ -41,9 +41,11 @@ from .const import (
|
||||
DATA_CONFIG_ENTRIES,
|
||||
DATA_DELETED_IDS,
|
||||
DATA_DEVICES,
|
||||
DATA_PENDING_UPDATES,
|
||||
DATA_PUSH_CHANNEL,
|
||||
DATA_STORE,
|
||||
DOMAIN,
|
||||
SENSOR_TYPES,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
)
|
||||
@@ -75,6 +77,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
DATA_DEVICES: {},
|
||||
DATA_PUSH_CHANNEL: {},
|
||||
DATA_STORE: store,
|
||||
DATA_PENDING_UPDATES: {sensor_type: {} for sensor_type in SENSOR_TYPES},
|
||||
}
|
||||
|
||||
hass.http.register_view(RegistrationsView())
|
||||
|
||||
@@ -4,7 +4,7 @@ from typing import Any
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON
|
||||
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
@@ -75,8 +75,9 @@ class MobileAppBinarySensor(MobileAppEntity, BinarySensorEntity):
|
||||
|
||||
async def async_restore_last_state(self, last_state: State) -> None:
|
||||
"""Restore previous state."""
|
||||
await super().async_restore_last_state(last_state)
|
||||
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
|
||||
if self._config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
|
||||
await super().async_restore_last_state(last_state)
|
||||
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
|
||||
self._async_update_attr_from_config()
|
||||
|
||||
@callback
|
||||
|
||||
@@ -20,6 +20,7 @@ DATA_DEVICES = "devices"
|
||||
DATA_STORE = "store"
|
||||
DATA_NOTIFY = "notify"
|
||||
DATA_PUSH_CHANNEL = "push_channel"
|
||||
DATA_PENDING_UPDATES = "pending_updates"
|
||||
|
||||
ATTR_APP_DATA = "app_data"
|
||||
ATTR_APP_ID = "app_id"
|
||||
@@ -94,3 +95,5 @@ SCHEMA_APP_DATA = vol.Schema(
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)
|
||||
|
||||
@@ -2,10 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ICON, CONF_NAME, CONF_UNIQUE_ID, STATE_UNAVAILABLE
|
||||
from homeassistant.const import (
|
||||
ATTR_ICON,
|
||||
CONF_NAME,
|
||||
CONF_UNIQUE_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import State, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
@@ -18,10 +24,15 @@ from .const import (
|
||||
ATTR_SENSOR_ICON,
|
||||
ATTR_SENSOR_STATE,
|
||||
ATTR_SENSOR_STATE_CLASS,
|
||||
ATTR_SENSOR_TYPE,
|
||||
DATA_PENDING_UPDATES,
|
||||
DOMAIN,
|
||||
SIGNAL_SENSOR_UPDATE,
|
||||
)
|
||||
from .helpers import device_info
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MobileAppEntity(RestoreEntity):
|
||||
"""Representation of a mobile app entity."""
|
||||
@@ -56,11 +67,14 @@ class MobileAppEntity(RestoreEntity):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_SENSOR_UPDATE}-{self._attr_unique_id}",
|
||||
f"{SIGNAL_SENSOR_UPDATE}-{self._config[ATTR_SENSOR_TYPE]}-{self._attr_unique_id}",
|
||||
self._handle_update,
|
||||
)
|
||||
)
|
||||
|
||||
# Apply any pending updates
|
||||
self._handle_update()
|
||||
|
||||
if (state := await self.async_get_last_state()) is None:
|
||||
return
|
||||
|
||||
@@ -69,13 +83,16 @@ class MobileAppEntity(RestoreEntity):
|
||||
async def async_restore_last_state(self, last_state: State) -> None:
|
||||
"""Restore previous state."""
|
||||
config = self._config
|
||||
config[ATTR_SENSOR_STATE] = last_state.state
|
||||
config[ATTR_SENSOR_ATTRIBUTES] = {
|
||||
**last_state.attributes,
|
||||
**self._config[ATTR_SENSOR_ATTRIBUTES],
|
||||
}
|
||||
if ATTR_ICON in last_state.attributes:
|
||||
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
|
||||
|
||||
# Only restore state if we don't have one already, since it can be set by a pending update
|
||||
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
|
||||
config[ATTR_SENSOR_STATE] = last_state.state
|
||||
config[ATTR_SENSOR_ATTRIBUTES] = {
|
||||
**last_state.attributes,
|
||||
**self._config[ATTR_SENSOR_ATTRIBUTES],
|
||||
}
|
||||
if ATTR_ICON in last_state.attributes:
|
||||
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
@@ -83,8 +100,21 @@ class MobileAppEntity(RestoreEntity):
|
||||
return device_info(self._registration)
|
||||
|
||||
@callback
|
||||
def _handle_update(self, data: dict[str, Any]) -> None:
|
||||
def _handle_update(self) -> None:
|
||||
"""Handle async event updates."""
|
||||
self._config.update(data)
|
||||
self._apply_pending_update()
|
||||
self._async_update_attr_from_config()
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _apply_pending_update(self) -> None:
|
||||
"""Restore any pending update for this entity."""
|
||||
entity_type = self._config[ATTR_SENSOR_TYPE]
|
||||
pending_updates = self.hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type]
|
||||
if update := pending_updates.pop(self._attr_unique_id, None):
|
||||
_LOGGER.debug(
|
||||
"Applying pending update for %s: %s",
|
||||
self._attr_unique_id,
|
||||
update,
|
||||
)
|
||||
# Apply the pending update
|
||||
self._config.update(update)
|
||||
|
||||
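The pending-updates change keeps the latest webhook payload per entity type and unique id, and the entity pops its own entry when it is (re)initialized, so updates sent while an entity was disabled or not yet set up are not lost. A reduced sketch of that handoff (plain dicts, names illustrative, not the actual hass.data layout):

# Webhook side: always keep only the latest payload for an entity.
pending_updates: dict[str, dict[str, dict]] = {"sensor": {}, "binary_sensor": {}}

def store_update(entity_type: str, unique_id: str, data: dict) -> None:
    pending_updates[entity_type][unique_id] = data

# Entity side: pop and merge whatever arrived before we started listening.
def apply_pending(entity_type: str, unique_id: str, config: dict) -> None:
    if update := pending_updates[entity_type].pop(unique_id, None):
        config.update(update)

store_update("sensor", "abc-battery", {"state": 42})
config: dict = {}
apply_pending("sensor", "abc-battery", config)
assert config == {"state": 42}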
@@ -86,24 +86,26 @@ class MobileAppSensor(MobileAppEntity, RestoreSensor):
|
||||
|
||||
async def async_restore_last_state(self, last_state: State) -> None:
|
||||
"""Restore previous state."""
|
||||
await super().async_restore_last_state(last_state)
|
||||
config = self._config
|
||||
if not (last_sensor_data := await self.async_get_last_sensor_data()):
|
||||
# Workaround to handle migration to RestoreSensor, can be removed
|
||||
# in HA Core 2023.4
|
||||
config[ATTR_SENSOR_STATE] = None
|
||||
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
|
||||
if TYPE_CHECKING:
|
||||
assert self.unique_id is not None
|
||||
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
|
||||
if (
|
||||
self.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and sensor_unique_id == "battery_temperature"
|
||||
):
|
||||
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
|
||||
else:
|
||||
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
|
||||
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
|
||||
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
|
||||
await super().async_restore_last_state(last_state)
|
||||
|
||||
if not (last_sensor_data := await self.async_get_last_sensor_data()):
|
||||
# Workaround to handle migration to RestoreSensor, can be removed
|
||||
# in HA Core 2023.4
|
||||
config[ATTR_SENSOR_STATE] = None
|
||||
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
|
||||
if TYPE_CHECKING:
|
||||
assert self.unique_id is not None
|
||||
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
|
||||
if (
|
||||
self.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and sensor_unique_id == "battery_temperature"
|
||||
):
|
||||
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
|
||||
else:
|
||||
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
|
||||
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
|
||||
|
||||
self._async_update_attr_from_config()
|
||||
|
||||
|
||||
@@ -79,7 +79,6 @@ from .const import (
|
||||
ATTR_SENSOR_STATE,
|
||||
ATTR_SENSOR_STATE_CLASS,
|
||||
ATTR_SENSOR_TYPE,
|
||||
ATTR_SENSOR_TYPE_BINARY_SENSOR,
|
||||
ATTR_SENSOR_TYPE_SENSOR,
|
||||
ATTR_SENSOR_UNIQUE_ID,
|
||||
ATTR_SENSOR_UOM,
|
||||
@@ -98,12 +97,14 @@ from .const import (
|
||||
DATA_CONFIG_ENTRIES,
|
||||
DATA_DELETED_IDS,
|
||||
DATA_DEVICES,
|
||||
DATA_PENDING_UPDATES,
|
||||
DOMAIN,
|
||||
ERR_ENCRYPTION_ALREADY_ENABLED,
|
||||
ERR_ENCRYPTION_REQUIRED,
|
||||
ERR_INVALID_FORMAT,
|
||||
ERR_SENSOR_NOT_REGISTERED,
|
||||
SCHEMA_APP_DATA,
|
||||
SENSOR_TYPES,
|
||||
SIGNAL_LOCATION_UPDATE,
|
||||
SIGNAL_SENSOR_UPDATE,
|
||||
)
|
||||
@@ -125,8 +126,6 @@ WEBHOOK_COMMANDS: Registry[
|
||||
str, Callable[[HomeAssistant, ConfigEntry, Any], Coroutine[Any, Any, Response]]
|
||||
] = Registry()
|
||||
|
||||
SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)
|
||||
|
||||
WEBHOOK_PAYLOAD_SCHEMA = vol.Any(
|
||||
vol.Schema(
|
||||
{
|
||||
@@ -601,14 +600,16 @@ async def webhook_register_sensor(
|
||||
if changes:
|
||||
entity_registry.async_update_entity(existing_sensor, **changes)
|
||||
|
||||
async_dispatcher_send(hass, f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}", data)
|
||||
_async_update_sensor_entity(
|
||||
hass, entity_type=entity_type, unique_store_key=unique_store_key, data=data
|
||||
)
|
||||
else:
|
||||
data[CONF_UNIQUE_ID] = unique_store_key
|
||||
data[CONF_NAME] = (
|
||||
f"{config_entry.data[ATTR_DEVICE_NAME]} {data[ATTR_SENSOR_NAME]}"
|
||||
)
|
||||
|
||||
register_signal = f"{DOMAIN}_{data[ATTR_SENSOR_TYPE]}_register"
|
||||
register_signal = f"{DOMAIN}_{entity_type}_register"
|
||||
async_dispatcher_send(hass, register_signal, data)
|
||||
|
||||
return webhook_response(
|
||||
@@ -685,10 +686,12 @@ async def webhook_update_sensor_states(
|
||||
continue
|
||||
|
||||
sensor[CONF_WEBHOOK_ID] = config_entry.data[CONF_WEBHOOK_ID]
|
||||
async_dispatcher_send(
|
||||
|
||||
_async_update_sensor_entity(
|
||||
hass,
|
||||
f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}",
|
||||
sensor,
|
||||
entity_type=entity_type,
|
||||
unique_store_key=unique_store_key,
|
||||
data=sensor,
|
||||
)
|
||||
|
||||
resp[unique_id] = {"success": True}
|
||||
@@ -697,11 +700,26 @@ async def webhook_update_sensor_states(
|
||||
entry = entity_registry.async_get(entity_id)
|
||||
|
||||
if entry and entry.disabled_by:
|
||||
# Inform the app that the entity is disabled
|
||||
resp[unique_id]["is_disabled"] = True
|
||||
|
||||
return webhook_response(resp, registration=config_entry.data)
|
||||
|
||||
|
||||
def _async_update_sensor_entity(
|
||||
hass: HomeAssistant, entity_type: str, unique_store_key: str, data: dict[str, Any]
|
||||
) -> None:
|
||||
"""Update a sensor entity with new data."""
|
||||
# Replace existing pending update with the latest sensor data.
|
||||
hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type][unique_store_key] = data
|
||||
|
||||
# The signal might not be handled if the entity was just enabled, but the data is stored
|
||||
# in pending updates and will be applied on entity initialization.
|
||||
async_dispatcher_send(
|
||||
hass, f"{SIGNAL_SENSOR_UPDATE}-{entity_type}-{unique_store_key}"
|
||||
)
|
||||
|
||||
|
||||
@WEBHOOK_COMMANDS.register("get_zones")
|
||||
async def webhook_get_zones(
|
||||
hass: HomeAssistant, config_entry: ConfigEntry, data: Any
|
||||
|
||||
@@ -4237,7 +4237,8 @@ class MQTTSubentryFlowHandler(ConfigSubentryFlow):
|
||||
return self.async_show_form(
|
||||
step_id="entity",
|
||||
data_schema=data_schema,
|
||||
description_placeholders={
|
||||
description_placeholders=TRANSLATION_DESCRIPTION_PLACEHOLDERS
|
||||
| {
|
||||
"mqtt_device": device_name,
|
||||
"entity_name_label": entity_name_label,
|
||||
"platform_label": platform_label,
|
||||
|
||||
@@ -27,7 +27,8 @@ from homeassistant.helpers.issue_registry import (
|
||||
)
|
||||
|
||||
from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, DOMAIN, LOGGER
|
||||
from .services import get_music_assistant_client, register_actions
|
||||
from .helpers import get_music_assistant_client
|
||||
from .services import register_actions
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from music_assistant_models.event import MassEvent
|
||||
|
||||
@@ -4,11 +4,18 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
import functools
|
||||
from typing import Any
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from music_assistant_models.errors import MusicAssistantError
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
|
||||
from . import MusicAssistantConfigEntry
|
||||
|
||||
|
||||
def catch_musicassistant_error[**_P, _R](
|
||||
@@ -26,3 +33,16 @@ def catch_musicassistant_error[**_P, _R](
|
||||
raise HomeAssistantError(error_msg) from err
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
@callback
|
||||
def get_music_assistant_client(
|
||||
hass: HomeAssistant, config_entry_id: str
|
||||
) -> MusicAssistantClient:
|
||||
"""Get the Music Assistant client for the given config entry."""
|
||||
entry: MusicAssistantConfigEntry | None
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError("Entry not found")
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError("Entry not loaded")
|
||||
return entry.runtime_data.mass
|
||||
|
||||
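The relocated get_music_assistant_client helper is what the custom actions use to resolve a loaded config entry's client. A hypothetical handler might call it like this (the handler name and explicit hass parameter are illustrative, not taken from the diff; ATTR_CONFIG_ENTRY_ID is the constant imported in services.py):

# Hypothetical usage of the helper inside an action handler.
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import HomeAssistant, ServiceCall

async def _handle_example_action(hass: HomeAssistant, call: ServiceCall) -> None:
    """Resolve the Music Assistant client referenced by the service call."""
    client = get_music_assistant_client(hass, call.data[ATTR_CONFIG_ENTRY_ID])
    # `client` is entry.runtime_data.mass, ready for calls against the server.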
@@ -22,11 +22,9 @@ from music_assistant_models.errors import MediaNotFoundError
|
||||
from music_assistant_models.event import MassEvent
|
||||
from music_assistant_models.media_items import ItemMapping, MediaItemType, Track
|
||||
from music_assistant_models.player_queue import PlayerQueue
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_MEDIA_ENQUEUE,
|
||||
ATTR_MEDIA_EXTRA,
|
||||
BrowseMedia,
|
||||
MediaPlayerDeviceClass,
|
||||
@@ -41,38 +39,26 @@ from homeassistant.components.media_player import (
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.const import ATTR_NAME, STATE_OFF, Platform
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.dt import utc_from_timestamp
|
||||
|
||||
from . import MusicAssistantConfigEntry
|
||||
from .const import (
|
||||
ATTR_ACTIVE,
|
||||
ATTR_ACTIVE_QUEUE,
|
||||
ATTR_ALBUM,
|
||||
ATTR_ANNOUNCE_VOLUME,
|
||||
ATTR_ARTIST,
|
||||
ATTR_AUTO_PLAY,
|
||||
ATTR_CURRENT_INDEX,
|
||||
ATTR_CURRENT_ITEM,
|
||||
ATTR_ELAPSED_TIME,
|
||||
ATTR_ITEMS,
|
||||
ATTR_MASS_PLAYER_TYPE,
|
||||
ATTR_MEDIA_ID,
|
||||
ATTR_MEDIA_TYPE,
|
||||
ATTR_NEXT_ITEM,
|
||||
ATTR_QUEUE_ID,
|
||||
ATTR_RADIO_MODE,
|
||||
ATTR_REPEAT_MODE,
|
||||
ATTR_SHUFFLE_ENABLED,
|
||||
ATTR_SOURCE_PLAYER,
|
||||
ATTR_URL,
|
||||
ATTR_USE_PRE_ANNOUNCE,
|
||||
DOMAIN,
|
||||
)
|
||||
from .entity import MusicAssistantEntity
|
||||
@@ -122,11 +108,6 @@ REPEAT_MODE_MAPPING_TO_HA = {
|
||||
# UNKNOWN is intentionally not mapped - will return None
|
||||
}
|
||||
|
||||
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
|
||||
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
|
||||
SERVICE_TRANSFER_QUEUE = "transfer_queue"
|
||||
SERVICE_GET_QUEUE = "get_queue"
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
@@ -143,44 +124,6 @@ async def async_setup_entry(
|
||||
# register callback to add players when they are discovered
|
||||
entry.runtime_data.platform_handlers.setdefault(Platform.MEDIA_PLAYER, add_player)
|
||||
|
||||
# add platform service for play_media with advanced options
|
||||
platform = async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
{
|
||||
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
|
||||
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
|
||||
vol.Optional(ATTR_ARTIST): cv.string,
|
||||
vol.Optional(ATTR_ALBUM): cv.string,
|
||||
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
|
||||
},
|
||||
"_async_handle_play_media",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_PLAY_ANNOUNCEMENT,
|
||||
{
|
||||
vol.Required(ATTR_URL): cv.string,
|
||||
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
|
||||
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
|
||||
},
|
||||
"_async_handle_play_announcement",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_TRANSFER_QUEUE,
|
||||
{
|
||||
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
|
||||
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
|
||||
},
|
||||
"_async_handle_transfer_queue",
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_GET_QUEUE,
|
||||
schema=None,
|
||||
func="_async_handle_get_queue",
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
|
||||
class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
|
||||
"""Representation of MediaPlayerEntity from Music Assistant Player."""
|
||||
|
||||
@@ -4,10 +4,13 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from music_assistant_models.enums import MediaType
|
||||
from music_assistant_models.enums import MediaType, QueueOption
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.components.media_player import (
|
||||
ATTR_MEDIA_ENQUEUE,
|
||||
DOMAIN as MEDIA_PLAYER_DOMAIN,
|
||||
)
|
||||
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
|
||||
from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
@@ -17,31 +20,41 @@ from homeassistant.core import (
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import (
|
||||
ATTR_ALBUM,
|
||||
ATTR_ALBUM_ARTISTS_ONLY,
|
||||
ATTR_ALBUM_TYPE,
|
||||
ATTR_ALBUMS,
|
||||
ATTR_ANNOUNCE_VOLUME,
|
||||
ATTR_ARTIST,
|
||||
ATTR_ARTISTS,
|
||||
ATTR_AUDIOBOOKS,
|
||||
ATTR_AUTO_PLAY,
|
||||
ATTR_FAVORITE,
|
||||
ATTR_ITEMS,
|
||||
ATTR_LIBRARY_ONLY,
|
||||
ATTR_LIMIT,
|
||||
ATTR_MEDIA_ID,
|
||||
ATTR_MEDIA_TYPE,
|
||||
ATTR_OFFSET,
|
||||
ATTR_ORDER_BY,
|
||||
ATTR_PLAYLISTS,
|
||||
ATTR_PODCASTS,
|
||||
ATTR_RADIO,
|
||||
ATTR_RADIO_MODE,
|
||||
ATTR_SEARCH,
|
||||
ATTR_SEARCH_ALBUM,
|
||||
ATTR_SEARCH_ARTIST,
|
||||
ATTR_SEARCH_NAME,
|
||||
ATTR_SOURCE_PLAYER,
|
||||
ATTR_TRACKS,
|
||||
ATTR_URL,
|
||||
ATTR_USE_PRE_ANNOUNCE,
|
||||
DOMAIN,
|
||||
)
|
||||
from .helpers import get_music_assistant_client
|
||||
from .schemas import (
|
||||
LIBRARY_RESULTS_SCHEMA,
|
||||
SEARCH_RESULT_SCHEMA,
|
||||
@@ -49,7 +62,6 @@ from .schemas import (
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
from music_assistant_models.media_items import (
|
||||
Album,
|
||||
Artist,
|
||||
@@ -60,28 +72,18 @@ if TYPE_CHECKING:
|
||||
Track,
|
||||
)
|
||||
|
||||
from . import MusicAssistantConfigEntry
|
||||
|
||||
SERVICE_SEARCH = "search"
|
||||
SERVICE_GET_LIBRARY = "get_library"
|
||||
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
|
||||
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
|
||||
SERVICE_TRANSFER_QUEUE = "transfer_queue"
|
||||
SERVICE_GET_QUEUE = "get_queue"
|
||||
|
||||
DEFAULT_OFFSET = 0
|
||||
DEFAULT_LIMIT = 25
|
||||
DEFAULT_SORT_ORDER = "name"
|
||||
|
||||
|
||||
@callback
|
||||
def get_music_assistant_client(
|
||||
hass: HomeAssistant, config_entry_id: str
|
||||
) -> MusicAssistantClient:
|
||||
"""Get the Music Assistant client for the given config entry."""
|
||||
entry: MusicAssistantConfigEntry | None
|
||||
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
|
||||
raise ServiceValidationError("Entry not found")
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
raise ServiceValidationError("Entry not loaded")
|
||||
return entry.runtime_data.mass
|
||||
|
||||
|
||||
@callback
|
||||
def register_actions(hass: HomeAssistant) -> None:
|
||||
"""Register custom actions."""
|
||||
@@ -124,6 +126,55 @@ def register_actions(hass: HomeAssistant) -> None:
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
# Platform entity services
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_MEDIA_ADVANCED,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
|
||||
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
|
||||
vol.Optional(ATTR_ARTIST): cv.string,
|
||||
vol.Optional(ATTR_ALBUM): cv.string,
|
||||
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
|
||||
},
|
||||
func="_async_handle_play_media",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_PLAY_ANNOUNCEMENT,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required(ATTR_URL): cv.string,
|
||||
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
|
||||
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
|
||||
},
|
||||
func="_async_handle_play_announcement",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_TRANSFER_QUEUE,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
|
||||
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
|
||||
},
|
||||
func="_async_handle_transfer_queue",
|
||||
)
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
SERVICE_GET_QUEUE,
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="_async_handle_get_queue",
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
)
|
||||
|
||||
|
||||
async def handle_search(call: ServiceCall) -> ServiceResponse:
|
||||
"""Handle queue_command action."""
|
||||
|
||||
@@ -13,7 +13,7 @@ from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: NSConfigEntry) -> bool:
|
||||
|
||||
homeassistant/components/nederlandse_spoorwegen/binary_sensor.py (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
"""Support for Nederlandse Spoorwegen public transport."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
from ns_api import Trip
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, INTEGRATION_TITLE, ROUTE_MODEL
|
||||
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0 # since we use coordinator pattern
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class NSBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describes Nederlandse Spoorwegen sensor entity."""
|
||||
|
||||
value_fn: Callable[[Trip], bool]
|
||||
|
||||
|
||||
def get_delay(planned: datetime | None, actual: datetime | None) -> bool:
|
||||
"""Return True if delay is present, False otherwise."""
|
||||
return bool(planned and actual and planned != actual)
|
||||
|
||||
|
||||
BINARY_SENSOR_DESCRIPTIONS = [
|
||||
NSBinarySensorEntityDescription(
|
||||
key="is_departure_delayed",
|
||||
translation_key="is_departure_delayed",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda trip: get_delay(
|
||||
trip.departure_time_planned, trip.departure_time_actual
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
NSBinarySensorEntityDescription(
|
||||
key="is_arrival_delayed",
|
||||
translation_key="is_arrival_delayed",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda trip: get_delay(
|
||||
trip.arrival_time_planned, trip.arrival_time_actual
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
NSBinarySensorEntityDescription(
|
||||
key="is_going",
|
||||
translation_key="is_going",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda trip: trip.going,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: NSConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the departure sensor from a config entry."""
|
||||
|
||||
coordinators = config_entry.runtime_data
|
||||
|
||||
for subentry_id, coordinator in coordinators.items():
|
||||
async_add_entities(
|
||||
(
|
||||
NSBinarySensor(coordinator, subentry_id, description)
|
||||
for description in BINARY_SENSOR_DESCRIPTIONS
|
||||
),
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
|
||||
class NSBinarySensor(CoordinatorEntity[NSDataUpdateCoordinator], BinarySensorEntity):
|
||||
"""Generic NS binary sensor based on entity description."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_attribution = "Data provided by NS"
|
||||
entity_description: NSBinarySensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: NSDataUpdateCoordinator,
|
||||
subentry_id: str,
|
||||
description: NSBinarySensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the binary sensor."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._subentry_id = subentry_id
|
||||
self._attr_unique_id = f"{subentry_id}-{description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, subentry_id)},
|
||||
name=coordinator.name,
|
||||
manufacturer=INTEGRATION_TITLE,
|
||||
model=ROUTE_MODEL,
|
||||
)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""Return true if the binary sensor is on."""
|
||||
if not (trip := self.coordinator.data.first_trip):
|
||||
return None
|
||||
return self.entity_description.value_fn(trip)
|
||||
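The delay binary sensors simply compare planned and actual timestamps via get_delay. A quick illustration of its behavior (times are made up):

# get_delay is defined in binary_sensor.py above; the datetimes here are made up.
from datetime import datetime

planned = datetime(2025, 1, 1, 8, 0)
actual = datetime(2025, 1, 1, 8, 4)

assert get_delay(planned, actual) is True     # planned and actual differ -> delayed
assert get_delay(planned, planned) is False   # identical times -> on time
assert get_delay(planned, None) is False      # missing actual time -> not delayed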
homeassistant/components/nederlandse_spoorwegen/icons.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"is_arrival_delayed": {
|
||||
"default": "mdi:bell-alert-outline"
|
||||
},
|
||||
"is_departure_delayed": {
|
||||
"default": "mdi:bell-alert-outline"
|
||||
},
|
||||
"is_going": {
|
||||
"default": "mdi:bell-cancel-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,7 @@ from datetime import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from ns_api import Trip
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -38,6 +39,33 @@ from .const import (
|
||||
)
|
||||
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
|
||||
|
||||
|
||||
def _get_departure_time(trip: Trip | None) -> datetime | None:
|
||||
"""Get next departure time from trip data."""
|
||||
return trip.departure_time_actual or trip.departure_time_planned if trip else None
|
||||
|
||||
|
||||
def _get_time_str(time: datetime | None) -> str | None:
|
||||
"""Get time as string."""
|
||||
return time.strftime("%H:%M") if time else None
|
||||
|
||||
|
||||
def _get_route(trip: Trip | None) -> list[str]:
|
||||
"""Get the route as a list of station names from trip data."""
|
||||
if not trip or not (trip_parts := trip.trip_parts):
|
||||
return []
|
||||
route = []
|
||||
if departure := trip.departure:
|
||||
route.append(departure)
|
||||
route.extend(part.destination for part in trip_parts)
|
||||
return route
|
||||
|
||||
|
||||
def _get_delay(planned: datetime | None, actual: datetime | None) -> bool:
|
||||
"""Return True if delay is present, False otherwise."""
|
||||
return bool(planned and actual and planned != actual)
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ROUTE_SCHEMA = vol.Schema(
|
||||
@@ -127,7 +155,7 @@ async def async_setup_entry(
|
||||
|
||||
|
||||
class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity):
|
||||
"""Implementation of a NS Departure Sensor."""
|
||||
"""Implementation of a NS Departure Sensor (legacy)."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TIMESTAMP
|
||||
_attr_attribution = "Data provided by NS"
|
||||
@@ -163,94 +191,40 @@ class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity
|
||||
return None
|
||||
|
||||
first_trip = route_data.first_trip
|
||||
if first_trip.departure_time_actual:
|
||||
return first_trip.departure_time_actual
|
||||
return first_trip.departure_time_planned
|
||||
return _get_departure_time(first_trip)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the state attributes."""
|
||||
route_data = self.coordinator.data
|
||||
if not route_data:
|
||||
return None
|
||||
|
||||
first_trip = route_data.first_trip
|
||||
next_trip = route_data.next_trip
|
||||
first_trip = self.coordinator.data.first_trip
|
||||
next_trip = self.coordinator.data.next_trip
|
||||
|
||||
if not first_trip:
|
||||
return None
|
||||
|
||||
route = []
|
||||
if first_trip.trip_parts:
|
||||
route = [first_trip.departure]
|
||||
route.extend(k.destination for k in first_trip.trip_parts)
|
||||
status = first_trip.status
|
||||
|
||||
# Static attributes
|
||||
attributes = {
|
||||
return {
|
||||
"going": first_trip.going,
|
||||
"departure_time_planned": None,
|
||||
"departure_time_actual": None,
|
||||
"departure_delay": False,
|
||||
"departure_time_planned": _get_time_str(first_trip.departure_time_planned),
|
||||
"departure_time_actual": _get_time_str(first_trip.departure_time_actual),
|
||||
"departure_delay": _get_delay(
|
||||
first_trip.departure_time_planned,
|
||||
first_trip.departure_time_actual,
|
||||
),
|
||||
"departure_platform_planned": first_trip.departure_platform_planned,
|
||||
"departure_platform_actual": first_trip.departure_platform_actual,
|
||||
"arrival_time_planned": None,
|
||||
"arrival_time_actual": None,
|
||||
"arrival_delay": False,
|
||||
"arrival_time_planned": _get_time_str(first_trip.arrival_time_planned),
|
||||
"arrival_time_actual": _get_time_str(first_trip.arrival_time_actual),
|
||||
"arrival_delay": _get_delay(
|
||||
first_trip.arrival_time_planned,
|
||||
first_trip.arrival_time_actual,
|
||||
),
|
||||
"arrival_platform_planned": first_trip.arrival_platform_planned,
|
||||
"arrival_platform_actual": first_trip.arrival_platform_actual,
|
||||
"next": None,
|
||||
"status": first_trip.status.lower() if first_trip.status else None,
|
||||
"next": _get_time_str(_get_departure_time(next_trip)),
|
||||
"status": status.lower() if status else None,
|
||||
"transfers": first_trip.nr_transfers,
|
||||
"route": route,
|
||||
"route": _get_route(first_trip),
|
||||
"remarks": None,
|
||||
}
|
||||
|
||||
# Planned departure attributes
|
||||
if first_trip.departure_time_planned is not None:
|
||||
attributes["departure_time_planned"] = (
|
||||
first_trip.departure_time_planned.strftime("%H:%M")
|
||||
)
|
||||
|
||||
# Actual departure attributes
|
||||
if first_trip.departure_time_actual is not None:
|
||||
attributes["departure_time_actual"] = (
|
||||
first_trip.departure_time_actual.strftime("%H:%M")
|
||||
)
|
||||
|
||||
# Delay departure attributes
|
||||
if (
|
||||
attributes["departure_time_planned"]
|
||||
and attributes["departure_time_actual"]
|
||||
and attributes["departure_time_planned"]
|
||||
!= attributes["departure_time_actual"]
|
||||
):
|
||||
attributes["departure_delay"] = True
|
||||
|
||||
# Planned arrival attributes
|
||||
if first_trip.arrival_time_planned is not None:
|
||||
attributes["arrival_time_planned"] = (
|
||||
first_trip.arrival_time_planned.strftime("%H:%M")
|
||||
)
|
||||
|
||||
# Actual arrival attributes
|
||||
if first_trip.arrival_time_actual is not None:
|
||||
attributes["arrival_time_actual"] = first_trip.arrival_time_actual.strftime(
|
||||
"%H:%M"
|
||||
)
|
||||
|
||||
# Delay arrival attributes
|
||||
if (
|
||||
attributes["arrival_time_planned"]
|
||||
and attributes["arrival_time_actual"]
|
||||
and attributes["arrival_time_planned"] != attributes["arrival_time_actual"]
|
||||
):
|
||||
attributes["arrival_delay"] = True
|
||||
|
||||
# Next trip attributes
|
||||
if next_trip:
|
||||
if next_trip.departure_time_actual is not None:
|
||||
attributes["next"] = next_trip.departure_time_actual.strftime("%H:%M")
|
||||
elif next_trip.departure_time_planned is not None:
|
||||
attributes["next"] = next_trip.departure_time_planned.strftime("%H:%M")
|
||||
|
||||
return attributes
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.