Mirror of https://github.com/home-assistant/core.git
Synced 2025-11-23 17:56:59 +00:00

Compare commits
28 Commits
tibber_dat...revert-156
| Author | SHA1 | Date |
|---|---|---|
| | f0981c00e5 | |
| | 82d3190016 | |
| | d8cbcc1977 | |
| | 4b69543515 | |
| | 97ef4a35b9 | |
| | f782c78650 | |
| | 139ed34c74 | |
| | 7f14d013ac | |
| | 963e27dda4 | |
| | b8e3d57fea | |
| | 0de2a16d0f | |
| | c8c2413a09 | |
| | 291331f878 | |
| | a13cdbdf3d | |
| | 1bf713f279 | |
| | 10c8ee417b | |
| | b23134f4f1 | |
| | f45a6f806b | |
| | d3857a00d5 | |
| | 8c9b90a9f9 | |
| | 4eedc88935 | |
| | 343ea1b82d | |
| | 36e13653d2 | |
| | 80444b2165 | |
| | 262f06dd2b | |
| | bd87119c2e | |
| | 0dfa037aa8 | |
| | c32a471573 | |
14  .github/workflows/builder.yml  vendored

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0

@@ -94,7 +94,7 @@ jobs:
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'

@@ -227,7 +227,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set build additional args
run: |

@@ -265,7 +265,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master

@@ -309,7 +309,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

@@ -418,7 +418,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

@@ -463,7 +463,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
2  .github/workflows/ci.yaml  vendored

@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
2  .github/workflows/codeql.yml  vendored

@@ -21,7 +21,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
2  .github/workflows/translations.yml  vendored

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
2  .github/workflows/wheels.yml  vendored

@@ -33,7 +33,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
2  CODEOWNERS  generated

@@ -1736,6 +1736,8 @@ build.json @home-assistant/supervisor
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW
5  homeassistant/brands/victron.json  Normal file

@@ -0,0 +1,5 @@
{
"domain": "victron",
"name": "Victron",
"integrations": ["victron_ble", "victron_remote_monitoring"]
}
@@ -6,9 +6,8 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import Event, HassJob, HomeAssistant, callback
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

@@ -20,7 +19,7 @@ from .analytics import (
EntityAnalyticsModifications,
async_devices_payload,
)
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
from .http import AnalyticsDevicesView

__all__ = [

@@ -43,28 +42,9 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
# Load stored data
await analytics.load()

@callback
def start_schedule(_event: Event) -> None:
async def start_schedule(_event: Event) -> None:
"""Start the send schedule after the started event."""
# Wait 15 min after started
async_call_later(
hass,
900,
HassJob(
analytics.send_analytics,
name="analytics schedule",
cancel_on_shutdown=True,
),
)

# Send every day
async_track_time_interval(
hass,
analytics.send_analytics,
INTERVAL,
name="analytics daily",
cancel_on_shutdown=True,
)
await analytics.async_schedule()

hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)

@@ -111,7 +91,7 @@ async def websocket_analytics_preferences(
analytics = hass.data[DATA_COMPONENT]

await analytics.save_preferences(preferences)
await analytics.send_analytics()
await analytics.async_schedule()

connection.send_result(
msg["id"],
@@ -7,6 +7,8 @@ from asyncio import timeout
|
||||
from collections.abc import Awaitable, Callable, Iterable, Mapping
|
||||
from dataclasses import asdict as dataclass_asdict, dataclass, field
|
||||
from datetime import datetime
|
||||
import random
|
||||
import time
|
||||
from typing import Any, Protocol
|
||||
import uuid
|
||||
|
||||
@@ -31,10 +33,18 @@ from homeassistant.const import (
|
||||
BASE_PLATFORMS,
|
||||
__version__ as HA_VERSION,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
HassJob,
|
||||
HomeAssistant,
|
||||
ReleaseChannel,
|
||||
callback,
|
||||
get_release_channel,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later, async_track_time_interval
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.singleton import singleton
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -51,6 +61,7 @@ from homeassistant.setup import async_get_loaded_integrations
|
||||
from .const import (
|
||||
ANALYTICS_ENDPOINT_URL,
|
||||
ANALYTICS_ENDPOINT_URL_DEV,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
ATTR_ADDON_COUNT,
|
||||
ATTR_ADDONS,
|
||||
ATTR_ARCH,
|
||||
@@ -71,6 +82,7 @@ from .const import (
|
||||
ATTR_PROTECTED,
|
||||
ATTR_RECORDER,
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOTS,
|
||||
ATTR_STATE_COUNT,
|
||||
ATTR_STATISTICS,
|
||||
ATTR_SUPERVISOR,
|
||||
@@ -80,8 +92,10 @@ from .const import (
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
DOMAIN,
|
||||
INTERVAL,
|
||||
LOGGER,
|
||||
PREFERENCE_SCHEMA,
|
||||
SNAPSHOT_VERSION,
|
||||
STORAGE_KEY,
|
||||
STORAGE_VERSION,
|
||||
)
|
||||
@@ -194,13 +208,18 @@ def gen_uuid() -> str:
|
||||
return uuid.uuid4().hex
|
||||
|
||||
|
||||
RELEASE_CHANNEL = get_release_channel()
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnalyticsData:
|
||||
"""Analytics data."""
|
||||
|
||||
onboarded: bool
|
||||
preferences: dict[str, bool]
|
||||
uuid: str | None
|
||||
uuid: str | None = None
|
||||
submission_identifier: str | None = None
|
||||
snapshot_submission_time: float | None = None
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:
|
||||
@@ -209,6 +228,8 @@ class AnalyticsData:
|
||||
data["onboarded"],
|
||||
data["preferences"],
|
||||
data["uuid"],
|
||||
data.get("submission_identifier"),
|
||||
data.get("snapshot_submission_time"),
|
||||
)
|
||||
|
||||
|
||||
@@ -219,8 +240,10 @@ class Analytics:
|
||||
"""Initialize the Analytics class."""
|
||||
self.hass: HomeAssistant = hass
|
||||
self.session = async_get_clientsession(hass)
|
||||
self._data = AnalyticsData(False, {}, None)
|
||||
self._data = AnalyticsData(False, {})
|
||||
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
|
||||
self._basic_scheduled: CALLBACK_TYPE | None = None
|
||||
self._snapshot_scheduled: CALLBACK_TYPE | None = None
|
||||
|
||||
@property
|
||||
def preferences(self) -> dict:
|
||||
@@ -228,6 +251,7 @@ class Analytics:
|
||||
preferences = self._data.preferences
|
||||
return {
|
||||
ATTR_BASE: preferences.get(ATTR_BASE, False),
|
||||
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
|
||||
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
|
||||
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
|
||||
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
|
||||
@@ -244,9 +268,9 @@ class Analytics:
|
||||
return self._data.uuid
|
||||
|
||||
@property
|
||||
def endpoint(self) -> str:
|
||||
def endpoint_basic(self) -> str:
|
||||
"""Return the endpoint that will receive the payload."""
|
||||
if HA_VERSION.endswith("0.dev0"):
|
||||
if RELEASE_CHANNEL is ReleaseChannel.DEV:
|
||||
# dev installations will contact the dev analytics environment
|
||||
return ANALYTICS_ENDPOINT_URL_DEV
|
||||
return ANALYTICS_ENDPOINT_URL
|
||||
@@ -277,13 +301,17 @@ class Analytics:
|
||||
):
|
||||
self._data.preferences[ATTR_DIAGNOSTICS] = False
|
||||
|
||||
async def _save(self) -> None:
|
||||
"""Save data."""
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
|
||||
async def save_preferences(self, preferences: dict) -> None:
|
||||
"""Save preferences."""
|
||||
preferences = PREFERENCE_SCHEMA(preferences)
|
||||
self._data.preferences.update(preferences)
|
||||
self._data.onboarded = True
|
||||
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
await self._save()
|
||||
|
||||
if self.supervisor:
|
||||
await hassio.async_update_diagnostics(
|
||||
@@ -292,17 +320,16 @@ class Analytics:
|
||||
|
||||
async def send_analytics(self, _: datetime | None = None) -> None:
|
||||
"""Send analytics."""
|
||||
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
|
||||
return
|
||||
|
||||
hass = self.hass
|
||||
supervisor_info = None
|
||||
operating_system_info: dict[str, Any] = {}
|
||||
|
||||
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
|
||||
LOGGER.debug("Nothing to submit")
|
||||
return
|
||||
|
||||
if self._data.uuid is None:
|
||||
self._data.uuid = gen_uuid()
|
||||
await self._store.async_save(dataclass_asdict(self._data))
|
||||
await self._save()
|
||||
|
||||
if self.supervisor:
|
||||
supervisor_info = hassio.get_supervisor_info(hass)
|
||||
@@ -436,7 +463,7 @@ class Analytics:
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self.session.post(self.endpoint, json=payload)
|
||||
response = await self.session.post(self.endpoint_basic, json=payload)
|
||||
if response.status == 200:
|
||||
LOGGER.info(
|
||||
(
|
||||
@@ -449,7 +476,7 @@ class Analytics:
|
||||
LOGGER.warning(
|
||||
"Sending analytics failed with statuscode %s from %s",
|
||||
response.status,
|
||||
self.endpoint,
|
||||
self.endpoint_basic,
|
||||
)
|
||||
except TimeoutError:
|
||||
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
|
||||
@@ -489,6 +516,182 @@ class Analytics:
|
||||
if entry.source != SOURCE_IGNORE and entry.disabled_by is None
|
||||
)
|
||||
|
||||
async def send_snapshot(self, _: datetime | None = None) -> None:
|
||||
"""Send a snapshot."""
|
||||
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
|
||||
return
|
||||
|
||||
payload = await _async_snapshot_payload(self.hass)
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": f"home-assistant/{HA_VERSION}",
|
||||
}
|
||||
if self._data.submission_identifier is not None:
|
||||
headers["X-Device-Database-Submission-Identifier"] = (
|
||||
self._data.submission_identifier
|
||||
)
|
||||
|
||||
try:
|
||||
async with timeout(30):
|
||||
response = await self.session.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
|
||||
)
|
||||
|
||||
if response.status == 200: # OK
|
||||
response_data = await response.json()
|
||||
new_identifier = response_data.get("submission_identifier")
|
||||
|
||||
if (
|
||||
new_identifier is not None
|
||||
and new_identifier != self._data.submission_identifier
|
||||
):
|
||||
self._data.submission_identifier = new_identifier
|
||||
await self._save()
|
||||
|
||||
LOGGER.info(
|
||||
"Submitted snapshot analytics to Home Assistant servers"
|
||||
)
|
||||
|
||||
elif response.status == 400: # Bad Request
|
||||
response_data = await response.json()
|
||||
error_kind = response_data.get("kind", "unknown")
|
||||
error_message = response_data.get("message", "Unknown error")
|
||||
|
||||
if error_kind == "invalid-submission-identifier":
|
||||
# Clear the invalid identifier and retry on next cycle
|
||||
LOGGER.warning(
|
||||
"Invalid submission identifier to %s, clearing: %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
error_message,
|
||||
)
|
||||
self._data.submission_identifier = None
|
||||
await self._save()
|
||||
else:
|
||||
LOGGER.warning(
|
||||
"Malformed snapshot analytics submission (%s) to %s: %s",
|
||||
error_kind,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
error_message,
|
||||
)
|
||||
|
||||
elif response.status == 503: # Service Unavailable
|
||||
response_text = await response.text()
|
||||
LOGGER.warning(
|
||||
"Snapshot analytics service %s unavailable: %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
response_text,
|
||||
)
|
||||
|
||||
else:
|
||||
LOGGER.warning(
|
||||
"Unexpected status code %s when submitting snapshot analytics to %s",
|
||||
response.status,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
)
|
||||
|
||||
except TimeoutError:
|
||||
LOGGER.error(
|
||||
"Timeout sending snapshot analytics to %s",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
)
|
||||
except aiohttp.ClientError as err:
|
||||
LOGGER.error(
|
||||
"Error sending snapshot analytics to %s: %r",
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
err,
|
||||
)
|
||||
|
||||
async def async_schedule(self) -> None:
|
||||
"""Schedule analytics."""
|
||||
if not self.onboarded:
|
||||
LOGGER.debug("Analytics not scheduled")
|
||||
if self._basic_scheduled is not None:
|
||||
self._basic_scheduled()
|
||||
self._basic_scheduled = None
|
||||
if self._snapshot_scheduled:
|
||||
self._snapshot_scheduled()
|
||||
self._snapshot_scheduled = None
|
||||
return
|
||||
|
||||
if not self.preferences.get(ATTR_BASE, False):
|
||||
LOGGER.debug("Basic analytics not scheduled")
|
||||
if self._basic_scheduled is not None:
|
||||
self._basic_scheduled()
|
||||
self._basic_scheduled = None
|
||||
elif self._basic_scheduled is None:
|
||||
# Wait 15 min after started for basic analytics
|
||||
self._basic_scheduled = async_call_later(
|
||||
self.hass,
|
||||
900,
|
||||
HassJob(
|
||||
self._async_schedule_basic,
|
||||
name="basic analytics schedule",
|
||||
cancel_on_shutdown=True,
|
||||
),
|
||||
)
|
||||
|
||||
if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
|
||||
ReleaseChannel.DEV,
|
||||
ReleaseChannel.NIGHTLY,
|
||||
):
|
||||
LOGGER.debug("Snapshot analytics not scheduled")
|
||||
if self._snapshot_scheduled:
|
||||
self._snapshot_scheduled()
|
||||
self._snapshot_scheduled = None
|
||||
elif self._snapshot_scheduled is None:
|
||||
snapshot_submission_time = self._data.snapshot_submission_time
|
||||
|
||||
if snapshot_submission_time is None:
|
||||
# Randomize the submission time within the 24 hours
|
||||
snapshot_submission_time = random.uniform(0, 86400)
|
||||
self._data.snapshot_submission_time = snapshot_submission_time
|
||||
await self._save()
|
||||
LOGGER.debug(
|
||||
"Initialized snapshot submission time to %s",
|
||||
snapshot_submission_time,
|
||||
)
|
||||
|
||||
# Calculate delay until next submission
|
||||
current_time = time.time()
|
||||
delay = (snapshot_submission_time - current_time) % 86400
|
||||
|
||||
self._snapshot_scheduled = async_call_later(
|
||||
self.hass,
|
||||
delay,
|
||||
HassJob(
|
||||
self._async_schedule_snapshots,
|
||||
name="snapshot analytics schedule",
|
||||
cancel_on_shutdown=True,
|
||||
),
|
||||
)
|
||||
|
||||
async def _async_schedule_basic(self, _: datetime | None = None) -> None:
|
||||
"""Schedule basic analytics."""
|
||||
await self.send_analytics()
|
||||
|
||||
# Send basic analytics every day
|
||||
self._basic_scheduled = async_track_time_interval(
|
||||
self.hass,
|
||||
self.send_analytics,
|
||||
INTERVAL,
|
||||
name="basic analytics daily",
|
||||
cancel_on_shutdown=True,
|
||||
)
|
||||
|
||||
async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
|
||||
"""Schedule snapshot analytics."""
|
||||
await self.send_snapshot()
|
||||
|
||||
# Send snapshot analytics every day
|
||||
self._snapshot_scheduled = async_track_time_interval(
|
||||
self.hass,
|
||||
self.send_snapshot,
|
||||
INTERVAL,
|
||||
name="snapshot analytics daily",
|
||||
cancel_on_shutdown=True,
|
||||
)
|
||||
|
||||
|
||||
def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
"""Extract domains from the YAML configuration."""
|
||||
@@ -505,8 +708,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
|
||||
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
"""Return detailed information about entities and devices."""
|
||||
async def _async_snapshot_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
"""Return detailed information about entities and devices for a snapshot."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
@@ -711,8 +914,13 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
|
||||
entities_info.append(entity_info)
|
||||
|
||||
return integrations_info
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return detailed information about entities and devices for a direct download."""
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
"version": f"home-assistant:{SNAPSHOT_VERSION}",
|
||||
"home_assistant": HA_VERSION,
|
||||
"integrations": integrations_info,
|
||||
"integrations": await _async_snapshot_payload(hass),
|
||||
}
|
||||
|
||||
@@ -7,6 +7,8 @@ import voluptuous as vol
ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
SNAPSHOT_VERSION = "1"
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"

@@ -38,6 +40,7 @@ ATTR_PREFERENCES = "preferences"
ATTR_PROTECTED = "protected"
ATTR_RECORDER = "recorder"
ATTR_SLUG = "slug"
ATTR_SNAPSHOTS = "snapshots"
ATTR_STATE_COUNT = "state_count"
ATTR_STATISTICS = "statistics"
ATTR_SUPERVISOR = "supervisor"

@@ -51,6 +54,7 @@ ATTR_VERSION = "version"
PREFERENCE_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_BASE): bool,
vol.Optional(ATTR_SNAPSHOTS): bool,
vol.Optional(ATTR_DIAGNOSTICS): bool,
vol.Optional(ATTR_STATISTICS): bool,
vol.Optional(ATTR_USAGE): bool,
@@ -7,3 +7,26 @@ CONNECTION_TIMEOUT: int = 10
# Field name of last self test retrieved from apcupsd.
LAST_S_TEST: Final = "laststest"

# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
# repair issue translation keys.
DEPRECATED_SENSORS: Final = {
"apc": "apc_deprecated",
"end apc": "date_deprecated",
"date": "date_deprecated",
"apcmodel": "available_via_device_info",
"model": "available_via_device_info",
"firmware": "available_via_device_info",
"version": "available_via_device_info",
"upsname": "available_via_device_info",
"serialno": "available_via_device_info",
}

AVAILABLE_VIA_DEVICE_ATTR: Final = {
"apcmodel": "model",
"model": "model",
"firmware": "hw_version",
"version": "sw_version",
"upsname": "name",
"serialno": "serial_number",
}
@@ -4,6 +4,8 @@ from __future__ import annotations
import logging

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,

@@ -22,9 +24,11 @@ from homeassistant.const import (
UnitOfTime,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.issue_registry as ir

from .const import LAST_S_TEST
from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
from .entity import APCUPSdEntity

@@ -528,3 +532,62 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
if not self.native_unit_of_measurement:
self._attr_native_unit_of_measurement = inferred_unit

async def async_added_to_hass(self) -> None:
"""Handle when entity is added to Home Assistant.

If this is a deprecated sensor entity, create a repair issue to guide
the user to disable it.
"""
await super().async_added_to_hass()

if not self.enabled:
return

reason = DEPRECATED_SENSORS.get(self.entity_description.key)
if not reason:
return

automations = automations_with_entity(self.hass, self.entity_id)
scripts = scripts_with_entity(self.hass, self.entity_id)
if not automations and not scripts:
return

entity_registry = er.async_get(self.hass)
items = [
f"- [{entry.name or entry.original_name or entity_id}]"
f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
for integration, entities in (
("automation", automations),
("script", scripts),
)
for entity_id in entities
if (entry := entity_registry.async_get(entity_id))
]
placeholders = {
"entity_name": str(self.name or self.entity_id),
"entity_id": self.entity_id,
"items": "\n".join(items),
}
if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
placeholders["available_via_device_attr"] = via_attr
if device_entry := self.device_entry:
placeholders["device_id"] = device_entry.id

ir.async_create_issue(
self.hass,
DOMAIN,
f"{reason}_{self.entity_id}",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key=reason,
translation_placeholders=placeholders,
)

async def async_will_remove_from_hass(self) -> None:
"""Handle when entity will be removed from Home Assistant."""
await super().async_will_remove_from_hass()

if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")
@@ -241,5 +241,19 @@
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
},
"issues": {
"apc_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"available_via_device_info": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"date_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
}
}
}
1  homeassistant/components/cosori/__init__.py  Normal file

@@ -0,0 +1 @@
"""Virtual integration: Cosori."""

6  homeassistant/components/cosori/manifest.json  Normal file

@@ -0,0 +1,6 @@
{
"domain": "cosori",
"name": "Cosori",
"integration_type": "virtual",
"supported_by": "vesync"
}
@@ -9,6 +9,7 @@ from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util.ssl import get_default_context

from .const import (
CONF_AUTHORIZE_STRING,

@@ -31,9 +32,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool
expires_at=entry.data[CONF_EXPIRES_AT],
)
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
ssl_context = get_default_context()

try:
cync = await Cync.create(cync_auth)
cync = await Cync.create(
auth=cync_auth,
ssl_context=ssl_context,
)
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("User token invalid") from ex
except CyncError as ex:
@@ -9,5 +9,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2.7.0"]
"requirements": ["aioautomower==2.7.1"]
}
@@ -112,6 +112,7 @@ async def async_setup_entry(
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(hours=1),
config_entry=entry,
)

# Fetch initial data so we have data when entities subscribe
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
from typing import cast
|
||||
|
||||
import pypck
|
||||
from pypck.connection import (
|
||||
@@ -48,7 +49,6 @@ from .const import (
|
||||
)
|
||||
from .helpers import (
|
||||
AddressType,
|
||||
InputType,
|
||||
LcnConfigEntry,
|
||||
LcnRuntimeData,
|
||||
async_update_config_entry,
|
||||
@@ -285,7 +285,7 @@ def _async_fire_access_control_event(
|
||||
hass: HomeAssistant,
|
||||
device: dr.DeviceEntry | None,
|
||||
address: AddressType,
|
||||
inp: InputType,
|
||||
inp: pypck.inputs.ModStatusAccessControl,
|
||||
) -> None:
|
||||
"""Fire access control event (transponder, transmitter, fingerprint, codelock)."""
|
||||
event_data = {
|
||||
@@ -299,7 +299,11 @@ def _async_fire_access_control_event(
|
||||
|
||||
if inp.periphery == pypck.lcn_defs.AccessControlPeriphery.TRANSMITTER:
|
||||
event_data.update(
|
||||
{"level": inp.level, "key": inp.key, "action": inp.action.value}
|
||||
{
|
||||
"level": inp.level,
|
||||
"key": inp.key,
|
||||
"action": cast(pypck.lcn_defs.KeyAction, inp.action).value,
|
||||
}
|
||||
)
|
||||
|
||||
event_name = f"lcn_{inp.periphery.value.lower()}"
|
||||
@@ -310,7 +314,7 @@ def _async_fire_send_keys_event(
|
||||
hass: HomeAssistant,
|
||||
device: dr.DeviceEntry | None,
|
||||
address: AddressType,
|
||||
inp: InputType,
|
||||
inp: pypck.inputs.ModSendKeysHost,
|
||||
) -> None:
|
||||
"""Fire send_keys event."""
|
||||
for table, action in enumerate(inp.actions):
|
||||
|
||||
@@ -100,8 +100,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
self._max_temp = config[CONF_DOMAIN_DATA][CONF_MAX_TEMP]
|
||||
self._min_temp = config[CONF_DOMAIN_DATA][CONF_MIN_TEMP]
|
||||
|
||||
self._current_temperature = None
|
||||
self._target_temperature = None
|
||||
self._is_on = True
|
||||
|
||||
self._attr_hvac_modes = [HVACMode.HEAT]
|
||||
@@ -121,16 +119,6 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
return UnitOfTemperature.FAHRENHEIT
|
||||
return UnitOfTemperature.CELSIUS
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
return self._current_temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
return self._target_temperature
|
||||
|
||||
@property
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac operation ie. heat, cool mode.
|
||||
@@ -166,7 +154,7 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
):
|
||||
return
|
||||
self._is_on = False
|
||||
self._target_temperature = None
|
||||
self._attr_target_temperature = None
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_temperature(self, **kwargs: Any) -> None:
|
||||
@@ -178,7 +166,7 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
self.setpoint, temperature, self.unit
|
||||
):
|
||||
return
|
||||
self._target_temperature = temperature
|
||||
self._attr_target_temperature = temperature
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
@@ -198,10 +186,14 @@ class LcnClimate(LcnEntity, ClimateEntity):
|
||||
return
|
||||
|
||||
if input_obj.get_var() == self.variable:
|
||||
self._current_temperature = input_obj.get_value().to_var_unit(self.unit)
|
||||
self._attr_current_temperature = float(
|
||||
input_obj.get_value().to_var_unit(self.unit)
|
||||
)
|
||||
elif input_obj.get_var() == self.setpoint:
|
||||
self._is_on = not input_obj.get_value().is_locked_regulator()
|
||||
if self._is_on:
|
||||
self._target_temperature = input_obj.get_value().to_var_unit(self.unit)
|
||||
self._attr_target_temperature = float(
|
||||
input_obj.get_value().to_var_unit(self.unit)
|
||||
)
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -120,7 +120,7 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
errors={CONF_BASE: error},
|
||||
)
|
||||
|
||||
data: dict = {
|
||||
data: dict[str, Any] = {
|
||||
**user_input,
|
||||
CONF_DEVICES: [],
|
||||
CONF_ENTITIES: [],
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Support for LCN covers."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Iterable
|
||||
from collections.abc import Coroutine, Iterable
|
||||
from datetime import timedelta
|
||||
from functools import partial
|
||||
from typing import Any
|
||||
@@ -81,6 +81,8 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
|
||||
_attr_is_opening = False
|
||||
_attr_assumed_state = True
|
||||
|
||||
reverse_time: pypck.lcn_defs.MotorReverseTime | None
|
||||
|
||||
def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
|
||||
"""Initialize the LCN cover."""
|
||||
super().__init__(config, config_entry)
|
||||
@@ -255,7 +257,15 @@ class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the state of the entity."""
|
||||
coros = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
|
||||
coros: list[
|
||||
Coroutine[
|
||||
Any,
|
||||
Any,
|
||||
pypck.inputs.ModStatusRelays
|
||||
| pypck.inputs.ModStatusMotorPositionBS4
|
||||
| None,
|
||||
]
|
||||
] = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
|
||||
if self.positioning_mode == pypck.lcn_defs.MotorPositioningMode.BS4:
|
||||
coros.append(
|
||||
self.device_connection.request_status_motor_position(
|
||||
@@ -283,7 +293,7 @@ class LcnRelayCover(LcnEntity, CoverEntity):
|
||||
)
|
||||
and input_obj.motor == self.motor.value
|
||||
):
|
||||
self._attr_current_cover_position = input_obj.position
|
||||
self._attr_current_cover_position = int(input_obj.position)
|
||||
if self._attr_current_cover_position in [0, 100]:
|
||||
self._attr_is_opening = False
|
||||
self._attr_is_closing = False
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from pypck.device import DeviceConnection
|
||||
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_NAME
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -10,7 +12,6 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from .const import CONF_DOMAIN_DATA, DOMAIN
|
||||
from .helpers import (
|
||||
AddressType,
|
||||
DeviceConnectionType,
|
||||
InputType,
|
||||
LcnConfigEntry,
|
||||
generate_unique_id,
|
||||
@@ -23,7 +24,7 @@ class LcnEntity(Entity):
|
||||
"""Parent class for all entities associated with the LCN component."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
device_connection: DeviceConnectionType
|
||||
device_connection: DeviceConnection
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -34,7 +35,7 @@ class LcnEntity(Entity):
|
||||
self.config = config
|
||||
self.config_entry = config_entry
|
||||
self.address: AddressType = config[CONF_ADDRESS]
|
||||
self._unregister_for_inputs: Callable | None = None
|
||||
self._unregister_for_inputs: Callable[[], None] | None = None
|
||||
self._name: str = config[CONF_NAME]
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={
|
||||
|
||||
@@ -11,6 +11,7 @@ from typing import cast
|
||||
|
||||
import pypck
|
||||
from pypck.connection import PchkConnectionManager
|
||||
from pypck.device import DeviceConnection
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@@ -48,7 +49,7 @@ class LcnRuntimeData:
|
||||
connection: PchkConnectionManager
|
||||
"""Connection to PCHK host."""
|
||||
|
||||
device_connections: dict[str, DeviceConnectionType]
|
||||
device_connections: dict[str, DeviceConnection]
|
||||
"""Logical addresses of devices connected to the host."""
|
||||
|
||||
add_entities_callbacks: dict[str, Callable[[Iterable[ConfigType]], None]]
|
||||
@@ -59,9 +60,8 @@ class LcnRuntimeData:
|
||||
type LcnConfigEntry = ConfigEntry[LcnRuntimeData]
|
||||
|
||||
type AddressType = tuple[int, int, bool]
|
||||
type DeviceConnectionType = pypck.module.ModuleConnection | pypck.module.GroupConnection
|
||||
|
||||
type InputType = type[pypck.inputs.Input]
|
||||
type InputType = pypck.inputs.Input
|
||||
|
||||
# Regex for address validation
|
||||
PATTERN_ADDRESS = re.compile(
|
||||
@@ -82,11 +82,11 @@ DOMAIN_LOOKUP = {
|
||||
|
||||
def get_device_connection(
|
||||
hass: HomeAssistant, address: AddressType, config_entry: LcnConfigEntry
|
||||
) -> DeviceConnectionType:
|
||||
) -> DeviceConnection:
|
||||
"""Return a lcn device_connection."""
|
||||
host_connection = config_entry.runtime_data.connection
|
||||
addr = pypck.lcn_addr.LcnAddr(*address)
|
||||
return host_connection.get_address_conn(addr)
|
||||
return host_connection.get_device_connection(addr)
|
||||
|
||||
|
||||
def get_resource(domain_name: str, domain_data: ConfigType) -> str:
|
||||
@@ -246,7 +246,7 @@ def register_lcn_address_devices(
|
||||
|
||||
|
||||
async def async_update_device_config(
|
||||
device_connection: DeviceConnectionType, device_config: ConfigType
|
||||
device_connection: DeviceConnection, device_config: ConfigType
|
||||
) -> None:
|
||||
"""Fill missing values in device_config with infos from LCN bus."""
|
||||
# fetch serial info if device is module
|
||||
@@ -269,10 +269,10 @@ async def async_update_device_config(
|
||||
if device_config[CONF_NAME] != "":
|
||||
return
|
||||
|
||||
device_name = ""
|
||||
device_name: str | None = None
|
||||
if not is_group:
|
||||
device_name = await device_connection.request_name()
|
||||
if is_group or device_name == "":
|
||||
if is_group or device_name is None:
|
||||
module_type = "Group" if is_group else "Module"
|
||||
device_name = (
|
||||
f"{module_type} "
|
||||
|
||||
@@ -9,5 +9,5 @@
"iot_class": "local_polling",
"loggers": ["pypck"],
"quality_scale": "bronze",
"requirements": ["pypck==0.9.2", "lcn-frontend==0.2.7"]
"requirements": ["pypck==0.9.5", "lcn-frontend==0.2.7"]
}
@@ -74,4 +74,4 @@ rules:
status: exempt
comment: |
Integration is not making any HTTP requests.
strict-typing: todo
strict-typing: done
@@ -156,6 +156,8 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
class LcnLedLogicSensor(LcnEntity, SensorEntity):
"""Representation of a LCN sensor for leds and logicops."""

source: pypck.lcn_defs.LedPort | pypck.lcn_defs.LogicOpPort

def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
"""Initialize the LCN sensor."""
super().__init__(config, config_entry)
@@ -3,6 +3,7 @@
from enum import StrEnum, auto

import pypck
from pypck.device import DeviceConnection
import voluptuous as vol

from homeassistant.const import (

@@ -48,7 +49,7 @@ from .const import (
VAR_UNITS,
VARIABLES,
)
from .helpers import DeviceConnectionType, LcnConfigEntry, is_states_string
from .helpers import LcnConfigEntry, is_states_string


class LcnServiceCall:

@@ -65,7 +66,7 @@ class LcnServiceCall:
"""Initialize service call."""
self.hass = hass

def get_device_connection(self, service: ServiceCall) -> DeviceConnectionType:
def get_device_connection(self, service: ServiceCall) -> DeviceConnection:
"""Get address connection object."""
entries: list[LcnConfigEntry] = self.hass.config_entries.async_loaded_entries(
DOMAIN
@@ -7,6 +7,7 @@ from functools import wraps
|
||||
from typing import Any, Final
|
||||
|
||||
import lcn_frontend as lcn_panel
|
||||
from pypck.device import DeviceConnection
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import panel_custom, websocket_api
|
||||
@@ -37,7 +38,6 @@ from .const import (
|
||||
DOMAIN,
|
||||
)
|
||||
from .helpers import (
|
||||
DeviceConnectionType,
|
||||
LcnConfigEntry,
|
||||
async_update_device_config,
|
||||
generate_unique_id,
|
||||
@@ -104,7 +104,9 @@ def get_config_entry(
|
||||
|
||||
@wraps(func)
|
||||
async def get_entry(
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get config_entry."""
|
||||
if not (config_entry := hass.config_entries.async_get_entry(msg["entry_id"])):
|
||||
@@ -124,7 +126,7 @@ def get_config_entry(
|
||||
async def websocket_get_device_configs(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Get device configs."""
|
||||
@@ -144,7 +146,7 @@ async def websocket_get_device_configs(
|
||||
async def websocket_get_entity_configs(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Get entities configs."""
|
||||
@@ -175,14 +177,14 @@ async def websocket_get_entity_configs(
|
||||
async def websocket_scan_devices(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Scan for new devices."""
|
||||
host_connection = config_entry.runtime_data.connection
|
||||
await host_connection.scan_modules()
|
||||
|
||||
for device_connection in host_connection.address_conns.values():
|
||||
for device_connection in host_connection.device_connections.values():
|
||||
if not device_connection.is_group:
|
||||
await async_create_or_update_device_in_config_entry(
|
||||
hass, device_connection, config_entry
|
||||
@@ -207,7 +209,7 @@ async def websocket_scan_devices(
|
||||
async def websocket_add_device(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Add a device."""
|
||||
@@ -253,7 +255,7 @@ async def websocket_add_device(
|
||||
async def websocket_delete_device(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Delete a device."""
|
||||
@@ -315,7 +317,7 @@ async def websocket_delete_device(
|
||||
async def websocket_add_entity(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Add an entity."""
|
||||
@@ -381,7 +383,7 @@ async def websocket_add_entity(
|
||||
async def websocket_delete_entity(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict,
|
||||
msg: dict[str, Any],
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Delete an entity."""
|
||||
@@ -421,7 +423,7 @@ async def websocket_delete_entity(
|
||||
|
||||
async def async_create_or_update_device_in_config_entry(
|
||||
hass: HomeAssistant,
|
||||
device_connection: DeviceConnectionType,
|
||||
device_connection: DeviceConnection,
|
||||
config_entry: LcnConfigEntry,
|
||||
) -> None:
|
||||
"""Create or update device in config_entry according to given device_connection."""
|
||||
@@ -451,7 +453,7 @@ async def async_create_or_update_device_in_config_entry(
|
||||
|
||||
|
||||
def get_entity_entry(
|
||||
hass: HomeAssistant, entity_config: dict, config_entry: LcnConfigEntry
|
||||
hass: HomeAssistant, entity_config: dict[str, Any], config_entry: LcnConfigEntry
|
||||
) -> er.RegistryEntry | None:
|
||||
"""Get entity RegistryEntry from entity_config."""
|
||||
entity_registry = er.async_get(hass)
|
||||
|
||||
@@ -9,7 +9,7 @@
},
"iot_class": "local_push",
"loggers": ["pylutron_caseta"],
"requirements": ["pylutron-caseta==0.25.0"],
"requirements": ["pylutron-caseta==0.26.0"],
"zeroconf": [
{
"properties": {
@@ -41,9 +41,11 @@ from .const import (
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_DEVICES,
DATA_PENDING_UPDATES,
DATA_PUSH_CHANNEL,
DATA_STORE,
DOMAIN,
SENSOR_TYPES,
STORAGE_KEY,
STORAGE_VERSION,
)

@@ -75,6 +77,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
DATA_DEVICES: {},
DATA_PUSH_CHANNEL: {},
DATA_STORE: store,
DATA_PENDING_UPDATES: {sensor_type: {} for sensor_type in SENSOR_TYPES},
}

hass.http.register_view(RegistrationsView())
@@ -4,7 +4,7 @@ from typing import Any
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_connect

@@ -75,8 +75,9 @@ class MobileAppBinarySensor(MobileAppEntity, BinarySensorEntity):
async def async_restore_last_state(self, last_state: State) -> None:
"""Restore previous state."""
await super().async_restore_last_state(last_state)
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
if self._config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
await super().async_restore_last_state(last_state)
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
self._async_update_attr_from_config()

@callback
@@ -20,6 +20,7 @@ DATA_DEVICES = "devices"
DATA_STORE = "store"
DATA_NOTIFY = "notify"
DATA_PUSH_CHANNEL = "push_channel"
DATA_PENDING_UPDATES = "pending_updates"

ATTR_APP_DATA = "app_data"
ATTR_APP_ID = "app_id"

@@ -94,3 +95,5 @@ SCHEMA_APP_DATA = vol.Schema(
},
extra=vol.ALLOW_EXTRA,
)

SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)
@@ -2,10 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
import logging
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ICON, CONF_NAME, CONF_UNIQUE_ID, STATE_UNAVAILABLE
|
||||
from homeassistant.const import (
|
||||
ATTR_ICON,
|
||||
CONF_NAME,
|
||||
CONF_UNIQUE_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import State, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
@@ -18,10 +24,15 @@ from .const import (
|
||||
ATTR_SENSOR_ICON,
|
||||
ATTR_SENSOR_STATE,
|
||||
ATTR_SENSOR_STATE_CLASS,
|
||||
ATTR_SENSOR_TYPE,
|
||||
DATA_PENDING_UPDATES,
|
||||
DOMAIN,
|
||||
SIGNAL_SENSOR_UPDATE,
|
||||
)
|
||||
from .helpers import device_info
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MobileAppEntity(RestoreEntity):
|
||||
"""Representation of a mobile app entity."""
|
||||
@@ -56,11 +67,14 @@ class MobileAppEntity(RestoreEntity):
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_SENSOR_UPDATE}-{self._attr_unique_id}",
|
||||
f"{SIGNAL_SENSOR_UPDATE}-{self._config[ATTR_SENSOR_TYPE]}-{self._attr_unique_id}",
|
||||
self._handle_update,
|
||||
)
|
||||
)
|
||||
|
||||
# Apply any pending updates
|
||||
self._handle_update()
|
||||
|
||||
if (state := await self.async_get_last_state()) is None:
|
||||
return
|
||||
|
||||
@@ -69,13 +83,16 @@ class MobileAppEntity(RestoreEntity):
|
||||
async def async_restore_last_state(self, last_state: State) -> None:
|
||||
"""Restore previous state."""
|
||||
config = self._config
|
||||
config[ATTR_SENSOR_STATE] = last_state.state
|
||||
config[ATTR_SENSOR_ATTRIBUTES] = {
|
||||
**last_state.attributes,
|
||||
**self._config[ATTR_SENSOR_ATTRIBUTES],
|
||||
}
|
||||
if ATTR_ICON in last_state.attributes:
|
||||
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
|
||||
|
||||
# Only restore state if we don't have one already, since it can be set by a pending update
|
||||
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
|
||||
config[ATTR_SENSOR_STATE] = last_state.state
|
||||
config[ATTR_SENSOR_ATTRIBUTES] = {
|
||||
**last_state.attributes,
|
||||
**self._config[ATTR_SENSOR_ATTRIBUTES],
|
||||
}
|
||||
if ATTR_ICON in last_state.attributes:
|
||||
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
|
||||
|
||||
@property
|
||||
def device_info(self):
|
||||
@@ -83,8 +100,21 @@ class MobileAppEntity(RestoreEntity):
|
||||
return device_info(self._registration)
|
||||
|
||||
@callback
|
||||
def _handle_update(self, data: dict[str, Any]) -> None:
|
||||
def _handle_update(self) -> None:
|
||||
"""Handle async event updates."""
|
||||
self._config.update(data)
|
||||
self._apply_pending_update()
|
||||
self._async_update_attr_from_config()
|
||||
self.async_write_ha_state()
|
||||
|
||||
def _apply_pending_update(self) -> None:
|
||||
"""Restore any pending update for this entity."""
|
||||
entity_type = self._config[ATTR_SENSOR_TYPE]
|
||||
pending_updates = self.hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type]
|
||||
if update := pending_updates.pop(self._attr_unique_id, None):
|
||||
_LOGGER.debug(
|
||||
"Applying pending update for %s: %s",
|
||||
self._attr_unique_id,
|
||||
update,
|
||||
)
|
||||
# Apply the pending update
|
||||
self._config.update(update)
|
||||
|
||||
@@ -86,24 +86,26 @@ class MobileAppSensor(MobileAppEntity, RestoreSensor):
|
||||
|
||||
async def async_restore_last_state(self, last_state: State) -> None:
|
||||
"""Restore previous state."""
|
||||
await super().async_restore_last_state(last_state)
|
||||
config = self._config
|
||||
if not (last_sensor_data := await self.async_get_last_sensor_data()):
|
||||
# Workaround to handle migration to RestoreSensor, can be removed
|
||||
# in HA Core 2023.4
|
||||
config[ATTR_SENSOR_STATE] = None
|
||||
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
|
||||
if TYPE_CHECKING:
|
||||
assert self.unique_id is not None
|
||||
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
|
||||
if (
|
||||
self.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and sensor_unique_id == "battery_temperature"
|
||||
):
|
||||
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
|
||||
else:
|
||||
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
|
||||
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
|
||||
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
|
||||
await super().async_restore_last_state(last_state)
|
||||
|
||||
if not (last_sensor_data := await self.async_get_last_sensor_data()):
|
||||
# Workaround to handle migration to RestoreSensor, can be removed
|
||||
# in HA Core 2023.4
|
||||
config[ATTR_SENSOR_STATE] = None
|
||||
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
|
||||
if TYPE_CHECKING:
|
||||
assert self.unique_id is not None
|
||||
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
|
||||
if (
|
||||
self.device_class == SensorDeviceClass.TEMPERATURE
|
||||
and sensor_unique_id == "battery_temperature"
|
||||
):
|
||||
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
|
||||
else:
|
||||
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
|
||||
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
|
||||
|
||||
self._async_update_attr_from_config()
|
||||
|
||||
|
||||
@@ -79,7 +79,6 @@ from .const import (
ATTR_SENSOR_STATE,
ATTR_SENSOR_STATE_CLASS,
ATTR_SENSOR_TYPE,
ATTR_SENSOR_TYPE_BINARY_SENSOR,
ATTR_SENSOR_TYPE_SENSOR,
ATTR_SENSOR_UNIQUE_ID,
ATTR_SENSOR_UOM,
@@ -98,12 +97,14 @@ from .const import (
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_DEVICES,
DATA_PENDING_UPDATES,
DOMAIN,
ERR_ENCRYPTION_ALREADY_ENABLED,
ERR_ENCRYPTION_REQUIRED,
ERR_INVALID_FORMAT,
ERR_SENSOR_NOT_REGISTERED,
SCHEMA_APP_DATA,
SENSOR_TYPES,
SIGNAL_LOCATION_UPDATE,
SIGNAL_SENSOR_UPDATE,
)
@@ -125,8 +126,6 @@ WEBHOOK_COMMANDS: Registry[
str, Callable[[HomeAssistant, ConfigEntry, Any], Coroutine[Any, Any, Response]]
] = Registry()

SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)

WEBHOOK_PAYLOAD_SCHEMA = vol.Any(
vol.Schema(
{
@@ -601,14 +600,16 @@ async def webhook_register_sensor(
if changes:
entity_registry.async_update_entity(existing_sensor, **changes)

async_dispatcher_send(hass, f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}", data)
_async_update_sensor_entity(
hass, entity_type=entity_type, unique_store_key=unique_store_key, data=data
)
else:
data[CONF_UNIQUE_ID] = unique_store_key
data[CONF_NAME] = (
f"{config_entry.data[ATTR_DEVICE_NAME]} {data[ATTR_SENSOR_NAME]}"
)

register_signal = f"{DOMAIN}_{data[ATTR_SENSOR_TYPE]}_register"
register_signal = f"{DOMAIN}_{entity_type}_register"
async_dispatcher_send(hass, register_signal, data)

return webhook_response(
@@ -685,10 +686,12 @@ async def webhook_update_sensor_states(
continue

sensor[CONF_WEBHOOK_ID] = config_entry.data[CONF_WEBHOOK_ID]
async_dispatcher_send(

_async_update_sensor_entity(
hass,
f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}",
sensor,
entity_type=entity_type,
unique_store_key=unique_store_key,
data=sensor,
)

resp[unique_id] = {"success": True}
@@ -697,11 +700,26 @@ async def webhook_update_sensor_states(
entry = entity_registry.async_get(entity_id)

if entry and entry.disabled_by:
# Inform the app that the entity is disabled
resp[unique_id]["is_disabled"] = True

return webhook_response(resp, registration=config_entry.data)


def _async_update_sensor_entity(
hass: HomeAssistant, entity_type: str, unique_store_key: str, data: dict[str, Any]
) -> None:
"""Update a sensor entity with new data."""
# Replace existing pending update with the latest sensor data.
hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type][unique_store_key] = data

# The signal might not be handled if the entity was just enabled, but the data is stored
# in pending updates and will be applied on entity initialization.
async_dispatcher_send(
hass, f"{SIGNAL_SENSOR_UPDATE}-{entity_type}-{unique_store_key}"
)


@WEBHOOK_COMMANDS.register("get_zones")
async def webhook_get_zones(
hass: HomeAssistant, config_entry: ConfigEntry, data: Any

@@ -27,7 +27,8 @@ from homeassistant.helpers.issue_registry import (
)

from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, DOMAIN, LOGGER
from .services import get_music_assistant_client, register_actions
from .helpers import get_music_assistant_client
from .services import register_actions

if TYPE_CHECKING:
from music_assistant_models.event import MassEvent

@@ -4,11 +4,18 @@ from __future__ import annotations

from collections.abc import Callable, Coroutine
import functools
from typing import Any
from typing import TYPE_CHECKING, Any

from music_assistant_models.errors import MusicAssistantError

from homeassistant.exceptions import HomeAssistantError
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError

if TYPE_CHECKING:
from music_assistant_client import MusicAssistantClient

from . import MusicAssistantConfigEntry


def catch_musicassistant_error[**_P, _R](
@@ -26,3 +33,16 @@ def catch_musicassistant_error[**_P, _R](
raise HomeAssistantError(error_msg) from err

return wrapper


@callback
def get_music_assistant_client(
hass: HomeAssistant, config_entry_id: str
) -> MusicAssistantClient:
"""Get the Music Assistant client for the given config entry."""
entry: MusicAssistantConfigEntry | None
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError("Entry not found")
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError("Entry not loaded")
return entry.runtime_data.mass

@@ -22,11 +22,9 @@ from music_assistant_models.errors import MediaNotFoundError
from music_assistant_models.event import MassEvent
from music_assistant_models.media_items import ItemMapping, MediaItemType, Track
from music_assistant_models.player_queue import PlayerQueue
import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_EXTRA,
BrowseMedia,
MediaPlayerDeviceClass,
@@ -41,38 +39,26 @@ from homeassistant.components.media_player import (
async_process_play_media_url,
)
from homeassistant.const import ATTR_NAME, STATE_OFF, Platform
from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse
from homeassistant.core import HomeAssistant, ServiceResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.dt import utc_from_timestamp

from . import MusicAssistantConfigEntry
from .const import (
ATTR_ACTIVE,
ATTR_ACTIVE_QUEUE,
ATTR_ALBUM,
ATTR_ANNOUNCE_VOLUME,
ATTR_ARTIST,
ATTR_AUTO_PLAY,
ATTR_CURRENT_INDEX,
ATTR_CURRENT_ITEM,
ATTR_ELAPSED_TIME,
ATTR_ITEMS,
ATTR_MASS_PLAYER_TYPE,
ATTR_MEDIA_ID,
ATTR_MEDIA_TYPE,
ATTR_NEXT_ITEM,
ATTR_QUEUE_ID,
ATTR_RADIO_MODE,
ATTR_REPEAT_MODE,
ATTR_SHUFFLE_ENABLED,
ATTR_SOURCE_PLAYER,
ATTR_URL,
ATTR_USE_PRE_ANNOUNCE,
DOMAIN,
)
from .entity import MusicAssistantEntity
@@ -122,11 +108,6 @@ REPEAT_MODE_MAPPING_TO_HA = {
# UNKNOWN is intentionally not mapped - will return None
}

SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
SERVICE_TRANSFER_QUEUE = "transfer_queue"
SERVICE_GET_QUEUE = "get_queue"


async def async_setup_entry(
hass: HomeAssistant,
@@ -143,44 +124,6 @@ async def async_setup_entry(
# register callback to add players when they are discovered
entry.runtime_data.platform_handlers.setdefault(Platform.MEDIA_PLAYER, add_player)

# add platform service for play_media with advanced options
platform = async_get_current_platform()
platform.async_register_entity_service(
SERVICE_PLAY_MEDIA_ADVANCED,
{
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
vol.Optional(ATTR_ARTIST): cv.string,
vol.Optional(ATTR_ALBUM): cv.string,
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
},
"_async_handle_play_media",
)
platform.async_register_entity_service(
SERVICE_PLAY_ANNOUNCEMENT,
{
vol.Required(ATTR_URL): cv.string,
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
},
"_async_handle_play_announcement",
)
platform.async_register_entity_service(
SERVICE_TRANSFER_QUEUE,
{
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
},
"_async_handle_transfer_queue",
)
platform.async_register_entity_service(
SERVICE_GET_QUEUE,
schema=None,
func="_async_handle_get_queue",
supports_response=SupportsResponse.ONLY,
)


class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
"""Representation of MediaPlayerEntity from Music Assistant Player."""

@@ -4,10 +4,13 @@ from __future__ import annotations

from typing import TYPE_CHECKING

from music_assistant_models.enums import MediaType
from music_assistant_models.enums import MediaType, QueueOption
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
DOMAIN as MEDIA_PLAYER_DOMAIN,
)
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import (
HomeAssistant,
@@ -17,31 +20,41 @@ from homeassistant.core import (
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, service

from .const import (
ATTR_ALBUM,
ATTR_ALBUM_ARTISTS_ONLY,
ATTR_ALBUM_TYPE,
ATTR_ALBUMS,
ATTR_ANNOUNCE_VOLUME,
ATTR_ARTIST,
ATTR_ARTISTS,
ATTR_AUDIOBOOKS,
ATTR_AUTO_PLAY,
ATTR_FAVORITE,
ATTR_ITEMS,
ATTR_LIBRARY_ONLY,
ATTR_LIMIT,
ATTR_MEDIA_ID,
ATTR_MEDIA_TYPE,
ATTR_OFFSET,
ATTR_ORDER_BY,
ATTR_PLAYLISTS,
ATTR_PODCASTS,
ATTR_RADIO,
ATTR_RADIO_MODE,
ATTR_SEARCH,
ATTR_SEARCH_ALBUM,
ATTR_SEARCH_ARTIST,
ATTR_SEARCH_NAME,
ATTR_SOURCE_PLAYER,
ATTR_TRACKS,
ATTR_URL,
ATTR_USE_PRE_ANNOUNCE,
DOMAIN,
)
from .helpers import get_music_assistant_client
from .schemas import (
LIBRARY_RESULTS_SCHEMA,
SEARCH_RESULT_SCHEMA,
@@ -49,7 +62,6 @@ from .schemas import (
)

if TYPE_CHECKING:
from music_assistant_client import MusicAssistantClient
from music_assistant_models.media_items import (
Album,
Artist,
@@ -60,28 +72,18 @@ if TYPE_CHECKING:
Track,
)

from . import MusicAssistantConfigEntry

SERVICE_SEARCH = "search"
SERVICE_GET_LIBRARY = "get_library"
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
SERVICE_TRANSFER_QUEUE = "transfer_queue"
SERVICE_GET_QUEUE = "get_queue"

DEFAULT_OFFSET = 0
DEFAULT_LIMIT = 25
DEFAULT_SORT_ORDER = "name"


@callback
def get_music_assistant_client(
hass: HomeAssistant, config_entry_id: str
) -> MusicAssistantClient:
"""Get the Music Assistant client for the given config entry."""
entry: MusicAssistantConfigEntry | None
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError("Entry not found")
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError("Entry not loaded")
return entry.runtime_data.mass


@callback
def register_actions(hass: HomeAssistant) -> None:
"""Register custom actions."""
@@ -124,6 +126,55 @@ def register_actions(hass: HomeAssistant) -> None:
supports_response=SupportsResponse.ONLY,
)

# Platform entity services
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_PLAY_MEDIA_ADVANCED,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
vol.Optional(ATTR_ARTIST): cv.string,
vol.Optional(ATTR_ALBUM): cv.string,
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
},
func="_async_handle_play_media",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_PLAY_ANNOUNCEMENT,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Required(ATTR_URL): cv.string,
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
},
func="_async_handle_play_announcement",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_TRANSFER_QUEUE,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
},
func="_async_handle_transfer_queue",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_GET_QUEUE,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema=None,
func="_async_handle_get_queue",
supports_response=SupportsResponse.ONLY,
)


async def handle_search(call: ServiceCall) -> ServiceResponse:
"""Handle queue_command action."""

@@ -13,7 +13,7 @@ from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)


PLATFORMS = [Platform.SENSOR]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: NSConfigEntry) -> bool:

120 homeassistant/components/nederlandse_spoorwegen/binary_sensor.py Normal file
@@ -0,0 +1,120 @@
"""Support for Nederlandse Spoorwegen public transport."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
import logging

from ns_api import Trip

from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN, INTEGRATION_TITLE, ROUTE_MODEL
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0 # since we use coordinator pattern


@dataclass(frozen=True, kw_only=True)
class NSBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Nederlandse Spoorwegen sensor entity."""

value_fn: Callable[[Trip], bool]


def get_delay(planned: datetime | None, actual: datetime | None) -> bool:
"""Return True if delay is present, False otherwise."""
return bool(planned and actual and planned != actual)


BINARY_SENSOR_DESCRIPTIONS = [
NSBinarySensorEntityDescription(
key="is_departure_delayed",
translation_key="is_departure_delayed",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: get_delay(
trip.departure_time_planned, trip.departure_time_actual
),
entity_registry_enabled_default=False,
),
NSBinarySensorEntityDescription(
key="is_arrival_delayed",
translation_key="is_arrival_delayed",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: get_delay(
trip.arrival_time_planned, trip.arrival_time_actual
),
entity_registry_enabled_default=False,
),
NSBinarySensorEntityDescription(
key="is_going",
translation_key="is_going",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: trip.going,
entity_registry_enabled_default=False,
),
]


async def async_setup_entry(
hass: HomeAssistant,
config_entry: NSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the departure sensor from a config entry."""

coordinators = config_entry.runtime_data

for subentry_id, coordinator in coordinators.items():
async_add_entities(
(
NSBinarySensor(coordinator, subentry_id, description)
for description in BINARY_SENSOR_DESCRIPTIONS
),
config_subentry_id=subentry_id,
)


class NSBinarySensor(CoordinatorEntity[NSDataUpdateCoordinator], BinarySensorEntity):
"""Generic NS binary sensor based on entity description."""

_attr_has_entity_name = True
_attr_attribution = "Data provided by NS"
entity_description: NSBinarySensorEntityDescription

def __init__(
self,
coordinator: NSDataUpdateCoordinator,
subentry_id: str,
description: NSBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._subentry_id = subentry_id
self._attr_unique_id = f"{subentry_id}-{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, subentry_id)},
name=coordinator.name,
manufacturer=INTEGRATION_TITLE,
model=ROUTE_MODEL,
)

@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
if not (trip := self.coordinator.data.first_trip):
return None
return self.entity_description.value_fn(trip)
15 homeassistant/components/nederlandse_spoorwegen/icons.json Normal file
@@ -0,0 +1,15 @@
{
"entity": {
"binary_sensor": {
"is_arrival_delayed": {
"default": "mdi:bell-alert-outline"
},
"is_departure_delayed": {
"default": "mdi:bell-alert-outline"
},
"is_going": {
"default": "mdi:bell-cancel-outline"
}
}
}
}
@@ -6,6 +6,7 @@ from datetime import datetime
import logging
from typing import Any

from ns_api import Trip
import voluptuous as vol

from homeassistant.components.sensor import (
@@ -38,6 +39,33 @@ from .const import (
)
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator


def _get_departure_time(trip: Trip | None) -> datetime | None:
"""Get next departure time from trip data."""
return trip.departure_time_actual or trip.departure_time_planned if trip else None


def _get_time_str(time: datetime | None) -> str | None:
"""Get time as string."""
return time.strftime("%H:%M") if time else None


def _get_route(trip: Trip | None) -> list[str]:
"""Get the route as a list of station names from trip data."""
if not trip or not (trip_parts := trip.trip_parts):
return []
route = []
if departure := trip.departure:
route.append(departure)
route.extend(part.destination for part in trip_parts)
return route


def _get_delay(planned: datetime | None, actual: datetime | None) -> bool:
"""Return True if delay is present, False otherwise."""
return bool(planned and actual and planned != actual)


_LOGGER = logging.getLogger(__name__)

ROUTE_SCHEMA = vol.Schema(
@@ -127,7 +155,7 @@ async def async_setup_entry(


class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity):
"""Implementation of a NS Departure Sensor."""
"""Implementation of a NS Departure Sensor (legacy)."""

_attr_device_class = SensorDeviceClass.TIMESTAMP
_attr_attribution = "Data provided by NS"
@@ -163,94 +191,40 @@ class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity
return None

first_trip = route_data.first_trip
if first_trip.departure_time_actual:
return first_trip.departure_time_actual
return first_trip.departure_time_planned
return _get_departure_time(first_trip)

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
route_data = self.coordinator.data
if not route_data:
return None

first_trip = route_data.first_trip
next_trip = route_data.next_trip
first_trip = self.coordinator.data.first_trip
next_trip = self.coordinator.data.next_trip

if not first_trip:
return None

route = []
if first_trip.trip_parts:
route = [first_trip.departure]
route.extend(k.destination for k in first_trip.trip_parts)
status = first_trip.status

# Static attributes
attributes = {
return {
"going": first_trip.going,
"departure_time_planned": None,
"departure_time_actual": None,
"departure_delay": False,
"departure_time_planned": _get_time_str(first_trip.departure_time_planned),
"departure_time_actual": _get_time_str(first_trip.departure_time_actual),
"departure_delay": _get_delay(
first_trip.departure_time_planned,
first_trip.departure_time_actual,
),
"departure_platform_planned": first_trip.departure_platform_planned,
"departure_platform_actual": first_trip.departure_platform_actual,
"arrival_time_planned": None,
"arrival_time_actual": None,
"arrival_delay": False,
"arrival_time_planned": _get_time_str(first_trip.arrival_time_planned),
"arrival_time_actual": _get_time_str(first_trip.arrival_time_actual),
"arrival_delay": _get_delay(
first_trip.arrival_time_planned,
first_trip.arrival_time_actual,
),
"arrival_platform_planned": first_trip.arrival_platform_planned,
"arrival_platform_actual": first_trip.arrival_platform_actual,
"next": None,
"status": first_trip.status.lower() if first_trip.status else None,
"next": _get_time_str(_get_departure_time(next_trip)),
"status": status.lower() if status else None,
"transfers": first_trip.nr_transfers,
"route": route,
"route": _get_route(first_trip),
"remarks": None,
}

# Planned departure attributes
if first_trip.departure_time_planned is not None:
attributes["departure_time_planned"] = (
first_trip.departure_time_planned.strftime("%H:%M")
)

# Actual departure attributes
if first_trip.departure_time_actual is not None:
attributes["departure_time_actual"] = (
first_trip.departure_time_actual.strftime("%H:%M")
)

# Delay departure attributes
if (
attributes["departure_time_planned"]
and attributes["departure_time_actual"]
and attributes["departure_time_planned"]
!= attributes["departure_time_actual"]
):
attributes["departure_delay"] = True

# Planned arrival attributes
if first_trip.arrival_time_planned is not None:
attributes["arrival_time_planned"] = (
first_trip.arrival_time_planned.strftime("%H:%M")
)

# Actual arrival attributes
if first_trip.arrival_time_actual is not None:
attributes["arrival_time_actual"] = first_trip.arrival_time_actual.strftime(
"%H:%M"
)

# Delay arrival attributes
if (
attributes["arrival_time_planned"]
and attributes["arrival_time_actual"]
and attributes["arrival_time_planned"] != attributes["arrival_time_actual"]
):
attributes["arrival_delay"] = True

# Next trip attributes
if next_trip:
if next_trip.departure_time_actual is not None:
attributes["next"] = next_trip.departure_time_actual.strftime("%H:%M")
elif next_trip.departure_time_planned is not None:
attributes["next"] = next_trip.departure_time_planned.strftime("%H:%M")

return attributes

@@ -64,6 +64,19 @@
}
}
},
"entity": {
"binary_sensor": {
"is_arrival_delayed": {
"name": "Arrival delayed"
},
"is_departure_delayed": {
"name": "Departure delayed"
},
"is_going": {
"name": "Going"
}
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, Home Assistant could not connect to the NS API. Please check your internet connection and the status of the NS API, then restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI.",

@@ -11,7 +11,6 @@ from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN, PLATFORMS
from .coordinator import (
OhmeAdvancedSettingsCoordinator,
OhmeChargeSessionCoordinator,
OhmeConfigEntry,
OhmeDeviceInfoCoordinator,
@@ -56,7 +55,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool

coordinators = (
OhmeChargeSessionCoordinator(hass, entry, client),
OhmeAdvancedSettingsCoordinator(hass, entry, client),
OhmeDeviceInfoCoordinator(hass, entry, client),
)

@@ -10,7 +10,7 @@ import logging
from ohme import ApiException, OhmeApiClient

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN
@@ -23,7 +23,6 @@ class OhmeRuntimeData:
"""Dataclass to hold ohme coordinators."""

charge_session_coordinator: OhmeChargeSessionCoordinator
advanced_settings_coordinator: OhmeAdvancedSettingsCoordinator
device_info_coordinator: OhmeDeviceInfoCoordinator


@@ -78,31 +77,6 @@ class OhmeChargeSessionCoordinator(OhmeBaseCoordinator):
await self.client.async_get_charge_session()


class OhmeAdvancedSettingsCoordinator(OhmeBaseCoordinator):
"""Coordinator to pull settings and charger state from the API."""

coordinator_name = "Advanced Settings"

def __init__(
self, hass: HomeAssistant, config_entry: OhmeConfigEntry, client: OhmeApiClient
) -> None:
"""Initialise coordinator."""
super().__init__(hass, config_entry, client)

@callback
def _dummy_listener() -> None:
pass

# This coordinator is used by the API library to determine whether the
# charger is online and available. It is therefore required even if no
# entities are using it.
self.async_add_listener(_dummy_listener)

async def _internal_update_data(self) -> None:
"""Fetch data from API endpoint."""
await self.client.async_get_advanced_settings()


class OhmeDeviceInfoCoordinator(OhmeBaseCoordinator):
"""Coordinator to pull device info and charger settings from the API."""

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["ohme==1.5.2"]
"requirements": ["ohme==1.6.0"]
}

@@ -37,7 +37,7 @@ class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription):
value_fn: Callable[[OhmeApiClient], str | int | float | None]


SENSOR_CHARGE_SESSION = [
SENSORS = [
OhmeSensorDescription(
key="status",
translation_key="status",
@@ -91,18 +91,6 @@ SENSOR_CHARGE_SESSION = [
),
]

SENSOR_ADVANCED_SETTINGS = [
OhmeSensorDescription(
key="ct_current",
translation_key="ct_current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
value_fn=lambda client: client.power.ct_amps,
is_supported_fn=lambda client: client.ct_connected,
entity_registry_enabled_default=False,
),
]


async def async_setup_entry(
hass: HomeAssistant,
@@ -110,16 +98,11 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors."""
coordinators = config_entry.runtime_data
coordinator_map = [
(SENSOR_CHARGE_SESSION, coordinators.charge_session_coordinator),
(SENSOR_ADVANCED_SETTINGS, coordinators.advanced_settings_coordinator),
]
coordinator = config_entry.runtime_data.charge_session_coordinator

async_add_entities(
OhmeSensor(coordinator, description)
for entities, coordinator in coordinator_map
for description in entities
for description in SENSORS
if description.is_supported_fn(coordinator.client)
)

@@ -441,7 +441,7 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN):

def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return other_flow._host == self._host # noqa: SLF001
return getattr(other_flow, "_host", None) == self._host

@callback
def _abort_if_manufacturer_is_not_samsung(self) -> None:

@@ -1919,8 +1919,23 @@ class RpcSleepingSensor(ShellySleepingRpcAttributeEntity, RestoreSensor):
super().__init__(coordinator, key, attribute, description, entry)
self.restored_data: SensorExtraStoredData | None = None

if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")
if coordinator.device.initialized:
if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")

translation_placeholders, translation_key = (
get_entity_translation_attributes(
get_rpc_channel_name(coordinator.device, key),
description.translation_key,
description.device_class,
self._default_to_device_class_name(),
)
)

if translation_placeholders:
self._attr_translation_placeholders = translation_placeholders
if translation_key:
self._attr_translation_key = translation_key

async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""

@@ -372,7 +372,7 @@ class _CustomDPCodeWrapper(DPCodeWrapper):
_valid_values: set[bool | float | int | str]

def __init__(
self, dpcode: str, valid_values: set[bool | float | int | str]
self, dpcode: DPCode, valid_values: set[bool | float | int | str]
) -> None:
"""Init CustomDPCodeBooleanWrapper."""
super().__init__(dpcode)
@@ -390,7 +390,7 @@ def _get_dpcode_wrapper(
description: TuyaBinarySensorEntityDescription,
) -> DPCodeWrapper | None:
"""Get DPCode wrapper for an entity description."""
dpcode = description.dpcode or description.key
dpcode = description.dpcode or DPCode(description.key)
if description.bitmap_key is not None:
return DPCodeBitmapBitWrapper.find_dpcode(
device, dpcode, bitmap_key=description.bitmap_key

@@ -34,7 +34,6 @@ from .models import (
DPCodeEnumWrapper,
DPCodeIntegerWrapper,
IntegerTypeData,
find_dpcode,
)
from .util import get_dpcode, get_dptype, remap_value

@@ -108,6 +107,35 @@ class _BrightnessWrapper(DPCodeIntegerWrapper):
return round(self.type_information.remap_value_from(value))


class _ColorTempWrapper(DPCodeIntegerWrapper):
"""Wrapper for color temperature DP code."""

def read_device_status(self, device: CustomerDevice) -> Any | None:
"""Return the color temperature value in Kelvin."""
if (temperature := self._read_device_status_raw(device)) is None:
return None

return color_util.color_temperature_mired_to_kelvin(
self.type_information.remap_value_to(
temperature,
MIN_MIREDS,
MAX_MIREDS,
reverse=True,
)
)

def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
"""Convert a Home Assistant value (Kelvin) back to a raw device value."""
return round(
self.type_information.remap_value_from(
color_util.color_temperature_kelvin_to_mired(value),
MIN_MIREDS,
MAX_MIREDS,
reverse=True,
)
)


@dataclass
class ColorTypeData:
"""Color Type Data."""
@@ -118,15 +146,27 @@ class ColorTypeData:


DEFAULT_COLOR_TYPE_DATA = ColorTypeData(
h_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=360, step=1),
s_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=255, step=1),
v_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=255, step=1),
h_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=360, step=1
),
s_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=255, step=1
),
v_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=255, step=1
),
)

DEFAULT_COLOR_TYPE_DATA_V2 = ColorTypeData(
h_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=360, step=1),
s_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1),
v_type=IntegerTypeData(DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1),
h_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=360, step=1
),
s_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1
),
v_type=IntegerTypeData(
dpcode=DPCode.COLOUR_DATA_HSV, min=1, scale=0, max=1000, step=1
),
)

MAX_MIREDS = 500 # 2000 K
@@ -529,6 +569,9 @@ async def async_setup_entry(
color_mode_wrapper=DPCodeEnumWrapper.find_dpcode(
device, description.color_mode, prefer_function=True
),
color_temp_wrapper=_ColorTempWrapper.find_dpcode(
device, description.color_temp, prefer_function=True
),
switch_wrapper=switch_wrapper,
)
for description in descriptions
@@ -555,7 +598,6 @@ class TuyaLightEntity(TuyaEntity, LightEntity):

_color_data_dpcode: DPCode | None = None
_color_data_type: ColorTypeData | None = None
_color_temp: IntegerTypeData | None = None
_white_color_mode = ColorMode.COLOR_TEMP
_fixed_color_mode: ColorMode | None = None
_attr_min_color_temp_kelvin = 2000 # 500 Mireds
@@ -567,8 +609,9 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
device_manager: Manager,
description: TuyaLightEntityDescription,
*,
brightness_wrapper: DPCodeIntegerWrapper | None,
brightness_wrapper: _BrightnessWrapper | None,
color_mode_wrapper: DPCodeEnumWrapper | None,
color_temp_wrapper: _ColorTempWrapper | None,
switch_wrapper: DPCodeBooleanWrapper,
) -> None:
"""Init TuyaHaLight."""
@@ -577,6 +620,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._brightness_wrapper = brightness_wrapper
self._color_mode_wrapper = color_mode_wrapper
self._color_temp_wrapper = color_temp_wrapper
self._switch_wrapper = switch_wrapper

color_modes: set[ColorMode] = {ColorMode.ONOFF}
@@ -597,9 +641,15 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
# Fetch color data type information
if function_data := json_loads_object(values):
self._color_data_type = ColorTypeData(
h_type=IntegerTypeData(dpcode, **cast(dict, function_data["h"])),
s_type=IntegerTypeData(dpcode, **cast(dict, function_data["s"])),
v_type=IntegerTypeData(dpcode, **cast(dict, function_data["v"])),
h_type=IntegerTypeData(
dpcode=dpcode, **cast(dict, function_data["h"])
),
s_type=IntegerTypeData(
dpcode=dpcode, **cast(dict, function_data["s"])
),
v_type=IntegerTypeData(
dpcode=dpcode, **cast(dict, function_data["v"])
),
)
else:
# If no type is found, use a default one
@@ -611,13 +661,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
self._color_data_type = DEFAULT_COLOR_TYPE_DATA_V2

# Check if the light has color temperature
if int_type := find_dpcode(
self.device,
description.color_temp,
dptype=DPType.INTEGER,
prefer_function=True,
):
self._color_temp = int_type
if color_temp_wrapper:
color_modes.add(ColorMode.COLOR_TEMP)
# If light has color but does not have color_temp, check if it has
# work_mode "white"
@@ -654,21 +698,11 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
),
]

if self._color_temp and ATTR_COLOR_TEMP_KELVIN in kwargs:
if self._color_temp_wrapper and ATTR_COLOR_TEMP_KELVIN in kwargs:
commands += [
{
"code": self._color_temp.dpcode,
"value": round(
self._color_temp.remap_value_from(
color_util.color_temperature_kelvin_to_mired(
kwargs[ATTR_COLOR_TEMP_KELVIN]
),
MIN_MIREDS,
MAX_MIREDS,
reverse=True,
)
),
},
self._color_temp_wrapper.get_update_command(
self.device, kwargs[ATTR_COLOR_TEMP_KELVIN]
)
]

if self._color_data_type and (
@@ -748,18 +782,7 @@ class TuyaLightEntity(TuyaEntity, LightEntity):
@property
def color_temp_kelvin(self) -> int | None:
"""Return the color temperature value in Kelvin."""
if not self._color_temp:
return None

temperature = self.device.status.get(self._color_temp.dpcode)
if temperature is None:
return None

return color_util.color_temperature_mired_to_kelvin(
self._color_temp.remap_value_to(
temperature, MIN_MIREDS, MAX_MIREDS, reverse=True
)
)
return self._read_wrapper(self._color_temp_wrapper)

@property
def hs_color(self) -> tuple[float, float] | None:

@@ -15,7 +15,7 @@ from .const import DPCode, DPType
from .util import parse_dptype, remap_value


@dataclass
@dataclass(kw_only=True)
class TypeInformation:
"""Type information.

@@ -23,14 +23,15 @@ class TypeInformation:
"""

dpcode: DPCode
type_data: str | None = None

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
def from_json(cls, dpcode: DPCode, type_data: str) -> Self | None:
"""Load JSON string and return a TypeInformation object."""
return cls(dpcode)
return cls(dpcode=dpcode, type_data=type_data)


@dataclass
@dataclass(kw_only=True)
class IntegerTypeData(TypeInformation):
"""Integer Type Data."""

@@ -84,13 +85,14 @@ class IntegerTypeData(TypeInformation):
return remap_value(value, from_min, from_max, self.min, self.max, reverse)

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
def from_json(cls, dpcode: DPCode, type_data: str) -> Self | None:
"""Load JSON string and return a IntegerTypeData object."""
if not (parsed := cast(dict[str, Any] | None, json_loads_object(data))):
if not (parsed := cast(dict[str, Any] | None, json_loads_object(type_data))):
return None

return cls(
dpcode,
dpcode=dpcode,
type_data=type_data,
min=int(parsed["min"]),
max=int(parsed["max"]),
scale=int(parsed["scale"]),
@@ -99,32 +101,40 @@ class IntegerTypeData(TypeInformation):
)


@dataclass
@dataclass(kw_only=True)
class BitmapTypeInformation(TypeInformation):
"""Bitmap type information."""

label: list[str]

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
def from_json(cls, dpcode: DPCode, type_data: str) -> Self | None:
"""Load JSON string and return a BitmapTypeInformation object."""
if not (parsed := json_loads_object(data)):
if not (parsed := json_loads_object(type_data)):
return None
return cls(dpcode, **cast(dict[str, list[str]], parsed))
return cls(
dpcode=dpcode,
type_data=type_data,
**cast(dict[str, list[str]], parsed),
)


@dataclass
@dataclass(kw_only=True)
class EnumTypeData(TypeInformation):
"""Enum Type Data."""

range: list[str]

@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
def from_json(cls, dpcode: DPCode, type_data: str) -> Self | None:
"""Load JSON string and return a EnumTypeData object."""
if not (parsed := json_loads_object(data)):
if not (parsed := json_loads_object(type_data)):
return None
return cls(dpcode, **cast(dict[str, list[str]], parsed))
return cls(
dpcode=dpcode,
type_data=type_data,
**cast(dict[str, list[str]], parsed),
)

_TYPE_INFORMATION_MAPPINGS: dict[DPType, type[TypeInformation]] = {
@@ -147,7 +157,7 @@ class DPCodeWrapper(ABC):
native_unit: str | None = None
suggested_unit: str | None = None

def __init__(self, dpcode: str) -> None:
def __init__(self, dpcode: DPCode) -> None:
"""Init DPCodeWrapper."""
self.dpcode = dpcode

@@ -190,7 +200,7 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
DPTYPE: DPType
type_information: T

def __init__(self, dpcode: str, type_information: T) -> None:
def __init__(self, dpcode: DPCode, type_information: T) -> None:
"""Init DPCodeWrapper."""
super().__init__(dpcode)
self.type_information = type_information
@@ -297,7 +307,7 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeData]):

DPTYPE = DPType.INTEGER

def __init__(self, dpcode: str, type_information: IntegerTypeData) -> None:
def __init__(self, dpcode: DPCode, type_information: IntegerTypeData) -> None:
"""Init DPCodeIntegerWrapper."""
super().__init__(dpcode, type_information)
self.native_unit = type_information.unit
@@ -327,7 +337,7 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeData]):
class DPCodeBitmapBitWrapper(DPCodeWrapper):
"""Simple wrapper for a specific bit in bitmap values."""

def __init__(self, dpcode: str, mask: int) -> None:
def __init__(self, dpcode: DPCode, mask: int) -> None:
"""Init DPCodeBitmapWrapper."""
super().__init__(dpcode)
self._mask = mask
@@ -428,7 +438,7 @@ def find_dpcode(
and parse_dptype(current_definition.type) is dptype
and (
type_information := type_information_cls.from_json(
dpcode, current_definition.values
dpcode=dpcode, type_data=current_definition.values
)
)
):

1 homeassistant/components/vagner_pool/__init__.py Normal file
@@ -0,0 +1 @@
"""Virtual integration: VÁGNER POOL."""

6 homeassistant/components/vagner_pool/manifest.json Normal file
@@ -0,0 +1,6 @@
{
"domain": "vagner_pool",
"name": "V\u00c1GNER POOL",
"integration_type": "virtual",
"supported_by": "pooldose"
}

54 homeassistant/components/victron_ble/__init__.py Normal file
@@ -0,0 +1,54 @@
"""The Victron Bluetooth Low Energy integration."""

from __future__ import annotations

import logging

from victron_ble_ha_parser import VictronBluetoothDeviceData

from homeassistant.components.bluetooth import (
BluetoothScanningMode,
async_rediscover_address,
)
from homeassistant.components.bluetooth.passive_update_processor import (
PassiveBluetoothProcessorCoordinator,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Victron BLE device from a config entry."""
address = entry.unique_id
assert address is not None
key = entry.data[CONF_ACCESS_TOKEN]
data = VictronBluetoothDeviceData(key)
coordinator = PassiveBluetoothProcessorCoordinator(
hass,
_LOGGER,
address=address,
mode=BluetoothScanningMode.ACTIVE,
update_method=data.update,
)
entry.runtime_data = coordinator

await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR])
entry.async_on_unload(coordinator.async_start())

return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""

unload_ok = await hass.config_entries.async_unload_platforms(
entry, [Platform.SENSOR]
)

if unload_ok:
async_rediscover_address(hass, entry.entry_id)

return unload_ok
123 homeassistant/components/victron_ble/config_flow.py Normal file
@@ -0,0 +1,123 @@
"""Config flow for Victron Bluetooth Low Energy integration."""

from __future__ import annotations

import logging
from typing import Any

from victron_ble_ha_parser import VictronBluetoothDeviceData
import voluptuous as vol

from homeassistant.components.bluetooth import (
BluetoothServiceInfoBleak,
async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_ADDRESS

from .const import DOMAIN, VICTRON_IDENTIFIER

_LOGGER = logging.getLogger(__name__)

STEP_ACCESS_TOKEN_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_ACCESS_TOKEN): str,
}
)


class VictronBLEConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Victron Bluetooth Low Energy."""

VERSION = 1

def __init__(self) -> None:
"""Initialize the config flow."""
self._discovered_device: str | None = None
self._discovered_devices: dict[str, str] = {}
self._discovered_devices_info: dict[str, BluetoothServiceInfoBleak] = {}

async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfoBleak
) -> ConfigFlowResult:
"""Handle the bluetooth discovery step."""
_LOGGER.debug("async_step_bluetooth: %s", discovery_info.address)
await self.async_set_unique_id(discovery_info.address)
self._abort_if_unique_id_configured()
device = VictronBluetoothDeviceData()
if not device.supported(discovery_info):
_LOGGER.debug("device %s not supported", discovery_info.address)
return self.async_abort(reason="not_supported")

self._discovered_device = discovery_info.address
self._discovered_devices_info[discovery_info.address] = discovery_info
self._discovered_devices[discovery_info.address] = discovery_info.name

self.context["title_placeholders"] = {"title": discovery_info.name}

return await self.async_step_access_token()

async def async_step_access_token(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle advertisement key input."""
# should only be called if there are discovered devices
assert self._discovered_device is not None
discovery_info = self._discovered_devices_info[self._discovered_device]
title = discovery_info.name

if user_input is not None:
# see if we can create a device with the access token
device = VictronBluetoothDeviceData(user_input[CONF_ACCESS_TOKEN])
if device.validate_advertisement_key(
discovery_info.manufacturer_data[VICTRON_IDENTIFIER]
):
return self.async_create_entry(
title=title,
data=user_input,
)
return self.async_abort(reason="invalid_access_token")

return self.async_show_form(
step_id="access_token",
data_schema=STEP_ACCESS_TOKEN_DATA_SCHEMA,
description_placeholders={"title": title},
)

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle select a device to set up."""
if user_input is not None:
address = user_input[CONF_ADDRESS]
await self.async_set_unique_id(address, raise_on_progress=False)
self._abort_if_unique_id_configured()
self._discovered_device = address
title = self._discovered_devices_info[address].name
return self.async_show_form(
step_id="access_token",
data_schema=STEP_ACCESS_TOKEN_DATA_SCHEMA,
description_placeholders={"title": title},
)

current_addresses = self._async_current_ids()
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:
continue
device = VictronBluetoothDeviceData()
if device.supported(discovery_info):
self._discovered_devices_info[address] = discovery_info
self._discovered_devices[address] = discovery_info.name

if len(self._discovered_devices) < 1:
return self.async_abort(reason="no_devices_found")

_LOGGER.debug("Discovered %s devices", len(self._discovered_devices))

return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices)}
),
)
4 homeassistant/components/victron_ble/const.py Normal file
@@ -0,0 +1,4 @@
"""Constants for the Victron Bluetooth Low Energy integration."""

DOMAIN = "victron_ble"
VICTRON_IDENTIFIER = 0x02E1
19 homeassistant/components/victron_ble/manifest.json Normal file
@@ -0,0 +1,19 @@
{
"domain": "victron_ble",
"name": "Victron BLE",
"bluetooth": [
{
"connectable": false,
"manufacturer_data_start": [16],
"manufacturer_id": 737
}
],
"codeowners": ["@rajlaud"],
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/victron_ble",
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["victron-ble-ha-parser==0.4.9"]
}
85 homeassistant/components/victron_ble/quality_scale.yaml Normal file
@@ -0,0 +1,85 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling:
status: exempt
comment: |
This integration does not poll.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup:
status: exempt
comment: |
There is nothing to test, the integration just passively receives BLE advertisements.
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: This integration does not provide additional actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No options to configure
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: todo
parallel-updates:
status: done
reauthentication-flow:
status: todo
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: |
This integration does not use IP addresses. Bluetooth MAC addresses do not change.
discovery: done
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
This integration has a fixed single device per instance, and each device needs a user-supplied encryption key to set up.
entity-category: done
entity-device-class: done
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: exempt
comment: |
This integration has a fixed single device.

# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo
474
homeassistant/components/victron_ble/sensor.py
Normal file
474
homeassistant/components/victron_ble/sensor.py
Normal file
@@ -0,0 +1,474 @@
|
||||
"""Sensor platform for Victron BLE."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from sensor_state_data import DeviceKey
|
||||
from victron_ble_ha_parser import Keys, Units
|
||||
|
||||
from homeassistant.components.bluetooth.passive_update_processor import (
|
||||
PassiveBluetoothDataProcessor,
|
||||
PassiveBluetoothDataUpdate,
|
||||
PassiveBluetoothEntityKey,
|
||||
PassiveBluetoothProcessorEntity,
|
||||
)
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
UnitOfElectricCurrent,
|
||||
UnitOfElectricPotential,
|
||||
UnitOfEnergy,
|
||||
UnitOfPower,
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info
|
||||
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
AC_IN_OPTIONS = [
|
||||
"ac_in_1",
|
||||
"ac_in_2",
|
||||
"not_connected",
|
||||
]
|
||||
|
||||
ALARM_OPTIONS = [
|
||||
"low_voltage",
|
||||
"high_voltage",
|
||||
"low_soc",
|
||||
"low_starter_voltage",
|
||||
"high_starter_voltage",
|
||||
"low_temperature",
|
||||
"high_temperature",
|
||||
"mid_voltage",
|
||||
"overload",
|
||||
"dc_ripple",
|
||||
"low_v_ac_out",
|
||||
"high_v_ac_out",
|
||||
"short_circuit",
|
||||
"bms_lockout",
|
||||
]
|
||||
|
||||
CHARGER_ERROR_OPTIONS = [
|
||||
"no_error",
|
||||
"temperature_battery_high",
|
||||
"voltage_high",
|
||||
"remote_temperature_auto_reset",
|
||||
"remote_temperature_not_auto_reset",
|
||||
"remote_battery",
|
||||
"high_ripple",
|
||||
"temperature_battery_low",
|
||||
"temperature_charger",
|
||||
"over_current",
|
||||
"bulk_time",
|
||||
"current_sensor",
|
||||
"internal_temperature",
|
||||
"fan",
|
||||
"overheated",
|
||||
"short_circuit",
|
||||
"converter_issue",
|
||||
"over_charge",
|
||||
"input_voltage",
|
||||
"input_current",
|
||||
"input_power",
|
||||
"input_shutdown_voltage",
|
||||
"input_shutdown_current",
|
||||
"input_shutdown_failure",
|
||||
"inverter_shutdown_pv_isolation",
|
||||
"inverter_shutdown_ground_fault",
|
||||
"inverter_overload",
|
||||
"inverter_temperature",
|
||||
"inverter_peak_current",
|
||||
"inverter_output_voltage",
|
||||
"inverter_self_test",
|
||||
"inverter_ac",
|
||||
"communication",
|
||||
"synchronisation",
|
||||
"bms",
|
||||
"network",
|
||||
"pv_input_shutdown",
|
||||
"cpu_temperature",
|
||||
"calibration_lost",
|
||||
"firmware",
|
||||
"settings",
|
||||
"tester_fail",
|
||||
"internal_dc_voltage",
|
||||
"self_test",
|
||||
"internal_supply",
|
||||
]
|
||||
|
||||
|
||||
def error_to_state(value: float | str | None) -> str | None:
    """Convert error code to state string."""
    value_map: dict[Any, str] = {
        "internal_supply_a": "internal_supply",
        "internal_supply_b": "internal_supply",
        "internal_supply_c": "internal_supply",
        "internal_supply_d": "internal_supply",
        "inverter_shutdown_41": "inverter_shutdown_pv_isolation",
        "inverter_shutdown_42": "inverter_shutdown_pv_isolation",
        "inverter_shutdown_43": "inverter_shutdown_ground_fault",
        "internal_temperature_a": "internal_temperature",
        "internal_temperature_b": "internal_temperature",
        "inverter_output_voltage_a": "inverter_output_voltage",
        "inverter_output_voltage_b": "inverter_output_voltage",
        "internal_dc_voltage_a": "internal_dc_voltage",
        "internal_dc_voltage_b": "internal_dc_voltage",
        "remote_temperature_a": "remote_temperature_auto_reset",
        "remote_temperature_b": "remote_temperature_auto_reset",
        "remote_temperature_c": "remote_temperature_not_auto_reset",
        "remote_battery_a": "remote_battery",
        "remote_battery_b": "remote_battery",
        "remote_battery_c": "remote_battery",
        "pv_input_shutdown_80": "pv_input_shutdown",
        "pv_input_shutdown_81": "pv_input_shutdown",
        "pv_input_shutdown_82": "pv_input_shutdown",
        "pv_input_shutdown_83": "pv_input_shutdown",
        "pv_input_shutdown_84": "pv_input_shutdown",
        "pv_input_shutdown_85": "pv_input_shutdown",
        "pv_input_shutdown_86": "pv_input_shutdown",
        "pv_input_shutdown_87": "pv_input_shutdown",
        "inverter_self_test_a": "inverter_self_test",
        "inverter_self_test_b": "inverter_self_test",
        "inverter_self_test_c": "inverter_self_test",
        "network_a": "network",
        "network_b": "network",
        "network_c": "network",
        "network_d": "network",
    }
    return value_map.get(value)
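# Illustrative sketch (not part of the original file): how error_to_state
# collapses the parser's variant codes onto the canonical enum options. The
# raw codes below are taken from value_map above purely as examples.
_ERROR_TO_STATE_EXAMPLES = {
    "network_b": "network",
    "pv_input_shutdown_84": "pv_input_shutdown",
}
for _raw, _expected in _ERROR_TO_STATE_EXAMPLES.items():
    assert error_to_state(_raw) == _expected
# Codes absent from value_map (including already-canonical ones such as
# "no_error") fall through to None as the function is written.
assert error_to_state("no_error") is None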
|
||||
|
||||
DEVICE_STATE_OPTIONS = [
|
||||
"off",
|
||||
"low_power",
|
||||
"fault",
|
||||
"bulk",
|
||||
"absorption",
|
||||
"float",
|
||||
"storage",
|
||||
"equalize_manual",
|
||||
"inverting",
|
||||
"power_supply",
|
||||
"starting_up",
|
||||
"repeated_absorption",
|
||||
"recondition",
|
||||
"battery_safe",
|
||||
"active",
|
||||
"external_control",
|
||||
"not_available",
|
||||
]
|
||||
|
||||
# Coordinator is used to centralize the data updates
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class VictronBLESensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Victron BLE sensor entity."""
|
||||
|
||||
value_fn: Callable[[float | int | str | None], float | int | str | None] = (
|
||||
lambda x: x
|
||||
)
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = {
|
||||
Keys.AC_IN_POWER: VictronBLESensorEntityDescription(
|
||||
key=Keys.AC_IN_POWER,
|
||||
translation_key=Keys.AC_IN_POWER,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.AC_IN_STATE: VictronBLESensorEntityDescription(
|
||||
key=Keys.AC_IN_STATE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="ac_in_state",
|
||||
options=AC_IN_OPTIONS,
|
||||
),
|
||||
Keys.AC_OUT_POWER: VictronBLESensorEntityDescription(
|
||||
key=Keys.AC_OUT_POWER,
|
||||
translation_key=Keys.AC_OUT_POWER,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.AC_OUT_STATE: VictronBLESensorEntityDescription(
|
||||
key=Keys.AC_OUT_STATE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="device_state",
|
||||
options=DEVICE_STATE_OPTIONS,
|
||||
),
|
||||
Keys.ALARM: VictronBLESensorEntityDescription(
|
||||
key=Keys.ALARM,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="alarm",
|
||||
options=ALARM_OPTIONS,
|
||||
),
|
||||
Keys.BALANCER_STATUS: VictronBLESensorEntityDescription(
|
||||
key=Keys.BALANCER_STATUS,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="balancer_status",
|
||||
options=["balanced", "balancing", "imbalance"],
|
||||
),
|
||||
Keys.BATTERY_CURRENT: VictronBLESensorEntityDescription(
|
||||
key=Keys.BATTERY_CURRENT,
|
||||
translation_key=Keys.BATTERY_CURRENT,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.BATTERY_TEMPERATURE: VictronBLESensorEntityDescription(
|
||||
key=Keys.BATTERY_TEMPERATURE,
|
||||
translation_key=Keys.BATTERY_TEMPERATURE,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.BATTERY_VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.BATTERY_VOLTAGE,
|
||||
translation_key=Keys.BATTERY_VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.CHARGER_ERROR: VictronBLESensorEntityDescription(
|
||||
key=Keys.CHARGER_ERROR,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="charger_error",
|
||||
options=CHARGER_ERROR_OPTIONS,
|
||||
value_fn=error_to_state,
|
||||
),
|
||||
Keys.CONSUMED_AMPERE_HOURS: VictronBLESensorEntityDescription(
|
||||
key=Keys.CONSUMED_AMPERE_HOURS,
|
||||
translation_key=Keys.CONSUMED_AMPERE_HOURS,
|
||||
native_unit_of_measurement=Units.ELECTRIC_CURRENT_FLOW_AMPERE_HOUR,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.CURRENT: VictronBLESensorEntityDescription(
|
||||
key=Keys.CURRENT,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.DEVICE_STATE: VictronBLESensorEntityDescription(
|
||||
key=Keys.DEVICE_STATE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="device_state",
|
||||
options=DEVICE_STATE_OPTIONS,
|
||||
),
|
||||
Keys.ERROR_CODE: VictronBLESensorEntityDescription(
|
||||
key=Keys.ERROR_CODE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="charger_error",
|
||||
options=CHARGER_ERROR_OPTIONS,
|
||||
),
|
||||
Keys.EXTERNAL_DEVICE_LOAD: VictronBLESensorEntityDescription(
|
||||
key=Keys.EXTERNAL_DEVICE_LOAD,
|
||||
translation_key=Keys.EXTERNAL_DEVICE_LOAD,
|
||||
device_class=SensorDeviceClass.CURRENT,
|
||||
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.INPUT_VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.INPUT_VOLTAGE,
|
||||
translation_key=Keys.INPUT_VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.METER_TYPE: VictronBLESensorEntityDescription(
|
||||
key=Keys.METER_TYPE,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="meter_type",
|
||||
options=[
|
||||
"solar_charger",
|
||||
"wind_charger",
|
||||
"shaft_generator",
|
||||
"alternator",
|
||||
"fuel_cell",
|
||||
"water_generator",
|
||||
"dc_dc_charger",
|
||||
"ac_charger",
|
||||
"generic_source",
|
||||
"generic_load",
|
||||
"electric_drive",
|
||||
"fridge",
|
||||
"water_pump",
|
||||
"bilge_pump",
|
||||
"dc_system",
|
||||
"inverter",
|
||||
"water_heater",
|
||||
],
|
||||
),
|
||||
Keys.MIDPOINT_VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.MIDPOINT_VOLTAGE,
|
||||
translation_key=Keys.MIDPOINT_VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.OFF_REASON: VictronBLESensorEntityDescription(
|
||||
key=Keys.OFF_REASON,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="off_reason",
|
||||
options=[
|
||||
"no_reason",
|
||||
"no_input_power",
|
||||
"switched_off_switch",
|
||||
"switched_off_register",
|
||||
"remote_input",
|
||||
"protection_active",
|
||||
"pay_as_you_go_out_of_credit",
|
||||
"bms",
|
||||
"engine_shutdown",
|
||||
"analysing_input_voltage",
|
||||
],
|
||||
),
|
||||
Keys.OUTPUT_VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.OUTPUT_VOLTAGE,
|
||||
translation_key=Keys.OUTPUT_VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.REMAINING_MINUTES: VictronBLESensorEntityDescription(
|
||||
key=Keys.REMAINING_MINUTES,
|
||||
translation_key=Keys.REMAINING_MINUTES,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
SensorDeviceClass.SIGNAL_STRENGTH: VictronBLESensorEntityDescription(
|
||||
key=SensorDeviceClass.SIGNAL_STRENGTH.value,
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.SOLAR_POWER: VictronBLESensorEntityDescription(
|
||||
key=Keys.SOLAR_POWER,
|
||||
translation_key=Keys.SOLAR_POWER,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.STARTER_VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.STARTER_VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.STATE_OF_CHARGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.STATE_OF_CHARGE,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.TEMPERATURE: VictronBLESensorEntityDescription(
|
||||
key=Keys.TEMPERATURE,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.VOLTAGE: VictronBLESensorEntityDescription(
|
||||
key=Keys.VOLTAGE,
|
||||
device_class=SensorDeviceClass.VOLTAGE,
|
||||
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
Keys.WARNING: VictronBLESensorEntityDescription(
|
||||
key=Keys.WARNING,
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key="alarm",
|
||||
options=ALARM_OPTIONS,
|
||||
),
|
||||
Keys.YIELD_TODAY: VictronBLESensorEntityDescription(
|
||||
key=Keys.YIELD_TODAY,
|
||||
translation_key=Keys.YIELD_TODAY,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
}
|
||||
|
||||
for i in range(1, 8):
    cell_key = getattr(Keys, f"CELL_{i}_VOLTAGE")
    SENSOR_DESCRIPTIONS[cell_key] = VictronBLESensorEntityDescription(
        key=cell_key,
        translation_key="cell_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
        state_class=SensorStateClass.MEASUREMENT,
    )
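# Illustrative sketch (not part of the original file): the loop above reuses a
# single description template for the per-cell voltage keys. Note that
# range(1, 8) covers CELL_1_VOLTAGE through CELL_7_VOLTAGE; whether the parser
# exposes additional cell keys is not verified here.
_cell_desc = SENSOR_DESCRIPTIONS[getattr(Keys, "CELL_1_VOLTAGE")]
assert _cell_desc.translation_key == "cell_voltage"
assert _cell_desc.native_unit_of_measurement == UnitOfElectricPotential.VOLT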
|
||||
|
||||
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)


def sensor_update_to_bluetooth_data_update(
    sensor_update,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            _device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                device_key.key
            ]
            for device_key in sensor_update.entity_descriptions
            if device_key.key in SENSOR_DESCRIPTIONS
        },
        entity_data={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
            if device_key.key in SENSOR_DESCRIPTIONS
        },
        entity_names={},
    )
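# Illustrative sketch (not part of the original file): the conversion above
# re-keys parser readings by PassiveBluetoothEntityKey and drops any reading
# whose key has no entry in SENSOR_DESCRIPTIONS. The concrete key below is
# made up for illustration.
_example_key = _device_key_to_bluetooth_entity_key(
    DeviceKey(key="battery_voltage", device_id=None)
)
assert _example_key == PassiveBluetoothEntityKey("battery_voltage", None)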
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Victron BLE sensor."""
    coordinator = entry.runtime_data
    processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
    entry.async_on_unload(
        processor.async_add_entities_listener(
            VictronBLESensorEntity, async_add_entities
        )
    )
    entry.async_on_unload(coordinator.async_register_processor(processor))
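# Illustrative sketch (not part of the original file): entry.runtime_data is
# expected to hold a passive Bluetooth processor coordinator created in the
# integration's __init__.py, which is not shown in this hunk. The helper below
# is a hypothetical example of that wiring; the update_method argument would
# be the parser's update callable, whatever the PR actually names it.
def _example_coordinator(hass, address, update_method):
    """Hypothetical wiring for the coordinator stored in entry.runtime_data."""
    from homeassistant.components.bluetooth import BluetoothScanningMode
    from homeassistant.components.bluetooth.passive_update_processor import (
        PassiveBluetoothProcessorCoordinator,
    )

    return PassiveBluetoothProcessorCoordinator(
        hass,
        LOGGER,
        address=address,
        mode=BluetoothScanningMode.PASSIVE,
        update_method=update_method,
    )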
|
||||
|
||||
class VictronBLESensorEntity(PassiveBluetoothProcessorEntity, SensorEntity):
    """Representation of Victron BLE sensor."""

    entity_description: VictronBLESensorEntityDescription

    @property
    def native_value(self) -> float | int | str | None:
        """Return the state of the sensor."""
        value = self.processor.entity_data.get(self.entity_key)

        return self.entity_description.value_fn(value)
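# Illustrative sketch (not part of the original file): only the CHARGER_ERROR
# description overrides value_fn; every other description keeps the identity
# default, so native_value simply mirrors the value stored by the processor.
# The raw code below is an example input, not data from a real device.
assert SENSOR_DESCRIPTIONS[Keys.CHARGER_ERROR].value_fn("network_b") == "network"
assert SENSOR_DESCRIPTIONS[Keys.BATTERY_VOLTAGE].value_fn(13.2) == 13.2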
homeassistant/components/victron_ble/strings.json (new file, 234 lines)
@@ -0,0 +1,234 @@
|
||||
{
|
||||
"common": {
|
||||
"high_voltage": "High voltage",
|
||||
"low_voltage": "Low voltage",
|
||||
"midpoint_voltage": "Midpoint voltage",
|
||||
"starter_voltage": "Starter voltage"
|
||||
},
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"invalid_access_token": "Invalid encryption key for instant readout",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
|
||||
},
|
||||
"flow_title": "{title}",
|
||||
"step": {
|
||||
"access_token": {
|
||||
"data": {
|
||||
"access_token": "The encryption key for instant readout of the Victron device."
|
||||
},
|
||||
"data_description": {
|
||||
"access_token": "The encryption key for instant readout may be found in the VictronConnect app under Settings > Product info > Instant readout details > Encryption data > Encryption Key."
|
||||
},
|
||||
"title": "{title}"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"address": "The Bluetooth address of the Victron device."
|
||||
},
|
||||
"data_description": {
|
||||
"address": "This Bluetooth address is automatically discovered. You may view a device's Bluetooth address in the VictronConnect app under Settings > Product info > Instant readout details > Encryption data > MAC Address."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"ac_in_power": {
|
||||
"name": "AC-in power"
|
||||
},
|
||||
"ac_in_state": {
|
||||
"name": "AC-in state",
|
||||
"state": {
|
||||
"ac_in_1": "AC-in 1",
|
||||
"ac_in_2": "AC-in 2",
|
||||
"not_connected": "Not connected"
|
||||
}
|
||||
},
|
||||
"ac_out_power": {
|
||||
"name": "AC-out power"
|
||||
},
|
||||
"alarm": {
|
||||
"name": "Alarm",
|
||||
"state": {
|
||||
"bms_lockout": "Battery management system lockout",
|
||||
"dc_ripple": "DC ripple",
|
||||
"high_starter_voltage": "High starter voltage",
|
||||
"high_temperature": "High temperature",
|
||||
"high_v_ac_out": "AC-out overvoltage",
|
||||
"high_voltage": "Overvoltage",
|
||||
"low_soc": "Low state of charge",
|
||||
"low_starter_voltage": "Low starter voltage",
|
||||
"low_temperature": "Low temperature",
|
||||
"low_v_ac_out": "AC-out undervoltage",
|
||||
"low_voltage": "Undervoltage",
|
||||
"mid_voltage": "[%key:component::victron_ble::common::midpoint_voltage%]",
|
||||
"overload": "Overload",
|
||||
"short_circuit": "Short circuit"
|
||||
}
|
||||
},
|
||||
"balancer_status": {
|
||||
"name": "Balancer status",
|
||||
"state": {
|
||||
"balanced": "Balanced",
|
||||
"balancing": "Balancing",
|
||||
"imbalance": "Imbalance"
|
||||
}
|
||||
},
|
||||
"battery_current": {
|
||||
"name": "Battery current"
|
||||
},
|
||||
"battery_temperature": {
|
||||
"name": "Battery temperature"
|
||||
},
|
||||
"battery_voltage": {
|
||||
"name": "Battery voltage"
|
||||
},
|
||||
"cell_voltage": {
|
||||
"name": "Cell {cell} voltage"
|
||||
},
|
||||
"charger_error": {
|
||||
"name": "Charger error",
|
||||
"state": {
|
||||
"bms": "BMS connection lost",
|
||||
"bulk_time": "Bulk time limit exceeded",
|
||||
"calibration_lost": "Factory calibration data lost",
|
||||
"communication": "Communication warning",
|
||||
"converter_issue": "Converter issue",
|
||||
"cpu_temperature": "CPU temperature too high",
|
||||
"current_sensor": "Current sensor issue",
|
||||
"fan": "Fan failure",
|
||||
"firmware": "Invalid or incompatible firmware",
|
||||
"high_ripple": "Battery high ripple voltage",
|
||||
"input_current": "Input overcurrent",
|
||||
"input_power": "Input overpower",
|
||||
"input_shutdown_current": "Input shutdown (current flow during off mode)",
|
||||
"input_shutdown_failure": "PV input failed to shutdown",
|
||||
"input_shutdown_voltage": "Input shutdown (battery overvoltage)",
|
||||
"input_voltage": "Input overvoltage",
|
||||
"internal_dc_voltage": "Internal DC voltage error",
|
||||
"internal_supply": "Internal supply voltage error",
|
||||
"internal_temperature": "Internal temperature sensor failure",
|
||||
"inverter_ac": "Inverter AC voltage on output",
|
||||
"inverter_output_voltage": "Inverter output voltage",
|
||||
"inverter_overload": "Inverter overload",
|
||||
"inverter_peak_current": "Inverter peak current",
|
||||
"inverter_self_test": "Inverter self-test failed",
|
||||
"inverter_shutdown_ground_fault": "Inverter shutdown (Ground fault)",
|
||||
"inverter_shutdown_pv_isolation": "Inverter shutdown (PV isolation)",
|
||||
"inverter_temperature": "Inverter temperature too high",
|
||||
"network": "Network misconfigured",
|
||||
"no_error": "No error",
|
||||
"over_charge": "Overcharge protection",
|
||||
"over_current": "Charger overcurrent",
|
||||
"overheated": "Terminals overheated",
|
||||
"pv_input_shutdown": "PV input shutdown",
|
||||
"remote_battery": "Remote battery voltage sense failure",
|
||||
"remote_temperature_auto_reset": "Remote temperature sensor failure (auto-reset)",
|
||||
"remote_temperature_not_auto_reset": "Remote temperature sensor failure (not auto-reset)",
|
||||
"self_test": "PV residual current sensor self-test failure",
|
||||
"settings": "Settings data lost",
|
||||
"short_circuit": "Charger short circuit",
|
||||
"synchronisation": "Synchronized charging device configuration issue",
|
||||
"temperature_battery_high": "Battery temperature too high",
|
||||
"temperature_battery_low": "Battery temperature too low",
|
||||
"temperature_charger": "Charger temperature too high",
|
||||
"tester_fail": "Tester fail",
|
||||
"voltage_high": "Battery overvoltage"
|
||||
}
|
||||
},
|
||||
"consumed_ampere_hours": {
|
||||
"name": "Consumed ampere hours"
|
||||
},
|
||||
"device_state": {
|
||||
"name": "Device state",
|
||||
"state": {
|
||||
"absorption": "Absorption",
|
||||
"active": "Active",
|
||||
"battery_safe": "Battery safe",
|
||||
"bulk": "Bulk",
|
||||
"equalize_manual": "Equalize (manual)",
|
||||
"external_control": "External control",
|
||||
"fault": "Fault",
|
||||
"float": "Float",
|
||||
"inverting": "Inverting",
|
||||
"low_power": "Low power",
|
||||
"not_available": "Not available",
|
||||
"off": "[%key:common::state::off%]",
|
||||
"power_supply": "Power supply",
|
||||
"recondition": "Recondition",
|
||||
"repeated_absorption": "Repeated absorption",
|
||||
"starting_up": "Starting up",
|
||||
"storage": "Storage"
|
||||
}
|
||||
},
|
||||
"error_code": {
|
||||
"name": "Error code"
|
||||
},
|
||||
"external_device_load": {
|
||||
"name": "External device load"
|
||||
},
|
||||
"input_voltage": {
|
||||
"name": "Input voltage"
|
||||
},
|
||||
"meter_type": {
|
||||
"name": "Meter type",
|
||||
"state": {
|
||||
"ac_charger": "AC charger",
|
||||
"alternator": "Alternator",
|
||||
"bilge_pump": "Bilge pump",
|
||||
"dc_dc_charger": "DC-DC charger",
|
||||
"dc_system": "DC system",
|
||||
"electric_drive": "Electric drive",
|
||||
"fridge": "Fridge",
|
||||
"fuel_cell": "Fuel cell",
|
||||
"generic_load": "Generic load",
|
||||
"generic_source": "Generic source",
|
||||
"inverter": "Inverter",
|
||||
"shaft_generator": "Shaft generator",
|
||||
"solar_charger": "Solar charger",
|
||||
"water_generator": "Water generator",
|
||||
"water_heater": "Water heater",
|
||||
"water_pump": "Water pump",
|
||||
"wind_charger": "Wind charger"
|
||||
}
|
||||
},
|
||||
"midpoint_voltage": {
|
||||
"name": "[%key:component::victron_ble::common::midpoint_voltage%]"
|
||||
},
|
||||
"off_reason": {
|
||||
"name": "Off reason",
|
||||
"state": {
|
||||
"analysing_input_voltage": "Analyzing input voltage",
|
||||
"bms": "Battery management system",
|
||||
"engine_shutdown": "Engine shutdown",
|
||||
"no_input_power": "No input power",
|
||||
"no_reason": "No reason",
|
||||
"pay_as_you_go_out_of_credit": "Pay-as-you-go out of credit",
|
||||
"protection_active": "Protection active",
|
||||
"remote_input": "Remote input",
|
||||
"switched_off_register": "Switched off by register",
|
||||
"switched_off_switch": "Switched off by switch"
|
||||
}
|
||||
},
|
||||
"output_voltage": {
|
||||
"name": "Output voltage"
|
||||
},
|
||||
"remaining_minutes": {
|
||||
"name": "Remaining minutes"
|
||||
},
|
||||
"solar_power": {
|
||||
"name": "Solar power"
|
||||
},
|
||||
"starter_voltage": {
|
||||
"name": "[%key:component::victron_ble::common::starter_voltage%]"
|
||||
},
|
||||
"warning": {
|
||||
"name": "Warning"
|
||||
},
|
||||
"yield_today": {
|
||||
"name": "Yield today"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -492,6 +492,7 @@ class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol
|
||||
await asyncio.sleep(_ANNOUNCEMENT_AFTER_DELAY)
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error while playing announcement")
|
||||
self._announcement = None
|
||||
raise
|
||||
finally:
|
||||
self._run_pipeline_task = None
|
||||
|
||||
homeassistant/generated/bluetooth.py (generated, 8 lines changed)
@@ -849,6 +849,14 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
|
||||
"manufacturer_id": 34714,
|
||||
"service_uuid": "0000cee0-0000-1000-8000-00805f9b34fb",
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "victron_ble",
|
||||
"manufacturer_data_start": [
|
||||
16,
|
||||
],
|
||||
"manufacturer_id": 737,
|
||||
},
|
||||
{
|
||||
"connectable": False,
|
||||
"domain": "xiaomi_ble",
|
||||
|
||||
homeassistant/generated/config_flows.py (generated, 1 line changed)
@@ -723,6 +723,7 @@ FLOWS = {
|
||||
"version",
|
||||
"vesync",
|
||||
"vicare",
|
||||
"victron_ble",
|
||||
"victron_remote_monitoring",
|
||||
"vilfo",
|
||||
"vizio",
|
||||
|
||||
@@ -1136,6 +1136,11 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"cosori": {
|
||||
"name": "Cosori",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "vesync"
|
||||
},
|
||||
"cozytouch": {
|
||||
"name": "Atlantic Cozytouch",
|
||||
"integration_type": "virtual",
|
||||
@@ -7191,6 +7196,11 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "local_polling"
|
||||
},
|
||||
"vagner_pool": {
|
||||
"name": "V\u00c1GNER POOL",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "pooldose"
|
||||
},
|
||||
"vallox": {
|
||||
"name": "Vallox",
|
||||
"integration_type": "hub",
|
||||
@@ -7272,11 +7282,22 @@
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
},
|
||||
"victron_remote_monitoring": {
|
||||
"name": "Victron Remote Monitoring",
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling"
|
||||
"victron": {
|
||||
"name": "Victron",
|
||||
"integrations": {
|
||||
"victron_ble": {
|
||||
"integration_type": "device",
|
||||
"config_flow": true,
|
||||
"iot_class": "local_push",
|
||||
"name": "Victron BLE"
|
||||
},
|
||||
"victron_remote_monitoring": {
|
||||
"integration_type": "service",
|
||||
"config_flow": true,
|
||||
"iot_class": "cloud_polling",
|
||||
"name": "Victron Remote Monitoring"
|
||||
}
|
||||
}
|
||||
},
|
||||
"vilfo": {
|
||||
"name": "Vilfo Router",
|
||||
|
||||
requirements_all.txt (generated, 11 lines changed)
@@ -209,7 +209,7 @@ aioaseko==1.0.0
|
||||
aioasuswrt==1.5.1
|
||||
|
||||
# homeassistant.components.husqvarna_automower
|
||||
aioautomower==2.7.0
|
||||
aioautomower==2.7.1
|
||||
|
||||
# homeassistant.components.azure_devops
|
||||
aioazuredevops==2.2.2
|
||||
@@ -1601,7 +1601,7 @@ odp-amsterdam==6.1.2
|
||||
oemthermostat==1.1.1
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.5.2
|
||||
ohme==1.6.0
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.5.1
|
||||
@@ -2150,7 +2150,7 @@ pylitejet==0.6.3
|
||||
pylitterbot==2025.0.0
|
||||
|
||||
# homeassistant.components.lutron_caseta
|
||||
pylutron-caseta==0.25.0
|
||||
pylutron-caseta==0.26.0
|
||||
|
||||
# homeassistant.components.lutron
|
||||
pylutron==0.2.18
|
||||
@@ -2269,7 +2269,7 @@ pypaperless==4.1.1
|
||||
pypca==0.0.7
|
||||
|
||||
# homeassistant.components.lcn
|
||||
pypck==0.9.2
|
||||
pypck==0.9.5
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.5
|
||||
@@ -3088,6 +3088,9 @@ velbus-aio==2025.11.0
|
||||
# homeassistant.components.venstar
|
||||
venstarcolortouch==0.21
|
||||
|
||||
# homeassistant.components.victron_ble
|
||||
victron-ble-ha-parser==0.4.9
|
||||
|
||||
# homeassistant.components.victron_remote_monitoring
|
||||
victron-vrm==0.1.8
|
||||
|
||||
|
||||
requirements_test_all.txt (generated, 11 lines changed)
@@ -197,7 +197,7 @@ aioaseko==1.0.0
|
||||
aioasuswrt==1.5.1
|
||||
|
||||
# homeassistant.components.husqvarna_automower
|
||||
aioautomower==2.7.0
|
||||
aioautomower==2.7.1
|
||||
|
||||
# homeassistant.components.azure_devops
|
||||
aioazuredevops==2.2.2
|
||||
@@ -1372,7 +1372,7 @@ objgraph==3.5.0
|
||||
odp-amsterdam==6.1.2
|
||||
|
||||
# homeassistant.components.ohme
|
||||
ohme==1.5.2
|
||||
ohme==1.6.0
|
||||
|
||||
# homeassistant.components.ollama
|
||||
ollama==0.5.1
|
||||
@@ -1794,7 +1794,7 @@ pylitejet==0.6.3
|
||||
pylitterbot==2025.0.0
|
||||
|
||||
# homeassistant.components.lutron_caseta
|
||||
pylutron-caseta==0.25.0
|
||||
pylutron-caseta==0.26.0
|
||||
|
||||
# homeassistant.components.lutron
|
||||
pylutron==0.2.18
|
||||
@@ -1892,7 +1892,7 @@ pypalazzetti==0.1.20
|
||||
pypaperless==4.1.1
|
||||
|
||||
# homeassistant.components.lcn
|
||||
pypck==0.9.2
|
||||
pypck==0.9.5
|
||||
|
||||
# homeassistant.components.pglab
|
||||
pypglab==0.0.5
|
||||
@@ -2555,6 +2555,9 @@ velbus-aio==2025.11.0
|
||||
# homeassistant.components.venstar
|
||||
venstarcolortouch==0.21
|
||||
|
||||
# homeassistant.components.victron_ble
|
||||
victron-ble-ha-parser==0.4.9
|
||||
|
||||
# homeassistant.components.victron_remote_monitoring
|
||||
victron-vrm==0.1.8
|
||||
|
||||
|
||||
@@ -1608,12 +1608,16 @@ def mock_integration(
|
||||
top_level_files: set[str] | None = None,
|
||||
) -> loader.Integration:
|
||||
"""Mock an integration."""
|
||||
integration = loader.Integration(
|
||||
hass,
|
||||
path = (
|
||||
f"{loader.PACKAGE_BUILTIN}.{module.DOMAIN}"
|
||||
if built_in
|
||||
else f"{loader.PACKAGE_CUSTOM_COMPONENTS}.{module.DOMAIN}",
|
||||
pathlib.Path(""),
|
||||
else f"{loader.PACKAGE_CUSTOM_COMPONENTS}.{module.DOMAIN}"
|
||||
)
|
||||
|
||||
integration = loader.Integration(
|
||||
hass,
|
||||
path,
|
||||
pathlib.Path(path.replace(".", "/")),
|
||||
module.mock_manifest(),
|
||||
top_level_files,
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""The tests for the analytics ."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from datetime import timedelta
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
@@ -22,8 +23,10 @@ from homeassistant.components.analytics.analytics import (
|
||||
from homeassistant.components.analytics.const import (
|
||||
ANALYTICS_ENDPOINT_URL,
|
||||
ANALYTICS_ENDPOINT_URL_DEV,
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
ATTR_BASE,
|
||||
ATTR_DIAGNOSTICS,
|
||||
ATTR_SNAPSHOTS,
|
||||
ATTR_STATISTICS,
|
||||
ATTR_USAGE,
|
||||
)
|
||||
@@ -31,13 +34,20 @@ from homeassistant.components.number import NumberDeviceClass
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState
|
||||
from homeassistant.const import ATTR_ASSUMED_STATE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, ReleaseChannel
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.loader import IntegrationNotFound
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform
|
||||
from tests.common import (
|
||||
MockConfigEntry,
|
||||
MockModule,
|
||||
async_fire_time_changed,
|
||||
mock_integration,
|
||||
mock_platform,
|
||||
)
|
||||
from tests.test_util.aiohttp import AiohttpClientMocker
|
||||
from tests.typing import ClientSessionGenerator
|
||||
|
||||
@@ -59,9 +69,31 @@ def uuid_mock() -> Generator[None]:
|
||||
@pytest.fixture(autouse=True)
|
||||
def ha_version_mock() -> Generator[None]:
|
||||
"""Mock the core version."""
|
||||
with patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION",
|
||||
MOCK_VERSION,
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION",
|
||||
MOCK_VERSION,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.analytics.analytics.RELEASE_CHANNEL",
|
||||
ReleaseChannel.STABLE,
|
||||
),
|
||||
):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def ha_dev_version_mock() -> Generator[None]:
|
||||
"""Mock the core version as a dev version."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION",
|
||||
MOCK_VERSION_DEV,
|
||||
),
|
||||
patch(
|
||||
"homeassistant.components.analytics.analytics.RELEASE_CHANNEL",
|
||||
ReleaseChannel.DEV,
|
||||
),
|
||||
):
|
||||
yield
|
||||
|
||||
@@ -97,7 +129,6 @@ async def test_no_send(
|
||||
|
||||
await analytics.send_analytics()
|
||||
|
||||
assert "Nothing to submit" in caplog.text
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@@ -615,7 +646,7 @@ async def test_custom_integrations(
|
||||
assert snapshot == submitted_data
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("supervisor_client")
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
|
||||
async def test_dev_url(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
@@ -625,16 +656,13 @@ async def test_dev_url(
|
||||
analytics = Analytics(hass)
|
||||
await analytics.save_preferences({ATTR_BASE: True})
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
|
||||
):
|
||||
await analytics.send_analytics()
|
||||
await analytics.send_analytics()
|
||||
|
||||
payload = aioclient_mock.mock_calls[0]
|
||||
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("supervisor_client")
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
|
||||
async def test_dev_url_error(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
@@ -645,10 +673,7 @@ async def test_dev_url_error(
|
||||
analytics = Analytics(hass)
|
||||
await analytics.save_preferences({ATTR_BASE: True})
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
|
||||
):
|
||||
await analytics.send_analytics()
|
||||
await analytics.send_analytics()
|
||||
|
||||
payload = aioclient_mock.mock_calls[0]
|
||||
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
|
||||
@@ -860,7 +885,7 @@ async def test_send_with_problems_loading_yaml(
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_hass_config", "supervisor_client")
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock", "mock_hass_config", "supervisor_client")
|
||||
async def test_timeout_while_sending(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
@@ -871,10 +896,7 @@ async def test_timeout_while_sending(
|
||||
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, exc=TimeoutError())
|
||||
|
||||
await analytics.save_preferences({ATTR_BASE: True})
|
||||
with patch(
|
||||
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
|
||||
):
|
||||
await analytics.send_analytics()
|
||||
await analytics.send_analytics()
|
||||
|
||||
assert "Timeout sending analytics" in caplog.text
|
||||
|
||||
@@ -1426,3 +1448,346 @@ async def test_analytics_platforms(
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def test_send_snapshot_disabled(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test no snapshots are sent."""
|
||||
analytics = Analytics(hass)
|
||||
|
||||
await analytics.send_snapshot()
|
||||
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: False})
|
||||
await analytics.send_snapshot()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
async def test_send_snapshot_success(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test successful snapshot submission."""
|
||||
aioclient_mock.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
status=200,
|
||||
json={"submission_identifier": "test-identifier-123"},
|
||||
)
|
||||
|
||||
analytics = Analytics(hass)
|
||||
|
||||
await analytics.save_preferences({ATTR_SNAPSHOTS: True})
|
||||
await analytics.send_snapshot()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
|
||||
preferences = await analytics._store.async_load()
|
||||
assert preferences["submission_identifier"] == "test-identifier-123"
|
||||
assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text
|
||||
|
||||
|
||||
async def test_send_snapshot_with_existing_identifier(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test snapshot submission with existing identifier."""
|
||||
aioclient_mock.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
status=200,
|
||||
json={"submission_identifier": "test-identifier-123"},
|
||||
)
|
||||
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
"submission_identifier": "old-identifier",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.send_snapshot()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
call_headers = aioclient_mock.mock_calls[0][3]
|
||||
assert call_headers["X-Device-Database-Submission-Identifier"] == "old-identifier"
|
||||
|
||||
preferences = await analytics._store.async_load()
|
||||
assert preferences["submission_identifier"] == "test-identifier-123"
|
||||
assert "Submitted snapshot analytics to Home Assistant servers" in caplog.text
|
||||
|
||||
|
||||
async def test_send_snapshot_invalid_identifier(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test snapshot submission with invalid identifier."""
|
||||
aioclient_mock.post(
|
||||
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
|
||||
status=400,
|
||||
json={
|
||||
"kind": "invalid-submission-identifier",
|
||||
"message": "The identifier is invalid",
|
||||
},
|
||||
)
|
||||
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
"submission_identifier": "invalid-identifier",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.send_snapshot()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
|
||||
preferences = await analytics._store.async_load()
|
||||
assert preferences.get("submission_identifier") is None
|
||||
assert "Invalid submission identifier" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("post_kwargs", "expected_log"),
|
||||
[
|
||||
(
|
||||
{
|
||||
"status": 400,
|
||||
"json": {
|
||||
"kind": "malformed-payload",
|
||||
"message": "Invalid payload format",
|
||||
},
|
||||
},
|
||||
"Malformed snapshot analytics submission",
|
||||
),
|
||||
(
|
||||
{"status": 503, "text": "Service Unavailable"},
|
||||
f"Snapshot analytics service {ANALYTICS_SNAPSHOT_ENDPOINT_URL} unavailable",
|
||||
),
|
||||
(
|
||||
{"status": 500},
|
||||
"Unexpected status code 500 when submitting snapshot analytics",
|
||||
),
|
||||
(
|
||||
{"exc": TimeoutError()},
|
||||
"Timeout sending snapshot analytics",
|
||||
),
|
||||
(
|
||||
{"exc": aiohttp.ClientError()},
|
||||
"Error sending snapshot analytics",
|
||||
),
|
||||
],
|
||||
ids=[
|
||||
"bad_request",
|
||||
"service_unavailable",
|
||||
"unexpected_status",
|
||||
"timeout",
|
||||
"client_error",
|
||||
],
|
||||
)
|
||||
async def test_send_snapshot_error(
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
post_kwargs: dict[str, Any],
|
||||
expected_log: str,
|
||||
) -> None:
|
||||
"""Test snapshot submission error."""
|
||||
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, **post_kwargs)
|
||||
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.send_snapshot()
|
||||
|
||||
assert expected_log in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
|
||||
async def test_async_schedule(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test scheduling."""
|
||||
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200)
|
||||
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, status=200, json={})
|
||||
|
||||
analytics = Analytics(hass)
|
||||
|
||||
# Schedule when not onboarded
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
# Onboard and enable both
|
||||
await analytics.save_preferences({ATTR_BASE: True, ATTR_SNAPSHOTS: True})
|
||||
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert any(
|
||||
str(call[1]) == ANALYTICS_ENDPOINT_URL_DEV for call in aioclient_mock.mock_calls
|
||||
)
|
||||
assert any(
|
||||
str(call[1]) == ANALYTICS_SNAPSHOT_ENDPOINT_URL
|
||||
for call in aioclient_mock.mock_calls
|
||||
)
|
||||
|
||||
preferences = await analytics._store.async_load()
|
||||
assert preferences["snapshot_submission_time"] is not None
|
||||
assert 0 <= preferences["snapshot_submission_time"] <= 86400
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock")
|
||||
async def test_async_schedule_disabled(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test scheduling when disabled."""
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: False, ATTR_SNAPSHOTS: False},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("supervisor_client")
|
||||
async def test_async_schedule_snapshots_not_dev(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test that snapshots are not scheduled on non-dev versions."""
|
||||
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
|
||||
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
assert str(aioclient_mock.mock_calls[0][1]) == ANALYTICS_ENDPOINT_URL
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock", "supervisor_client")
|
||||
async def test_async_schedule_already_scheduled(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
) -> None:
|
||||
"""Test not rescheduled if already scheduled."""
|
||||
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200)
|
||||
aioclient_mock.post(ANALYTICS_SNAPSHOT_ENDPOINT_URL, status=200, json={})
|
||||
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.async_schedule()
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 2
|
||||
|
||||
assert any(
|
||||
str(call[1]) == ANALYTICS_ENDPOINT_URL_DEV for call in aioclient_mock.mock_calls
|
||||
)
|
||||
assert any(
|
||||
str(call[1]) == ANALYTICS_SNAPSHOT_ENDPOINT_URL
|
||||
for call in aioclient_mock.mock_calls
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(("onboarded"), [True, False])
|
||||
@pytest.mark.usefixtures("ha_dev_version_mock")
|
||||
async def test_async_schedule_cancel_when_disabled(
|
||||
hass: HomeAssistant,
|
||||
aioclient_mock: AiohttpClientMocker,
|
||||
onboarded: bool,
|
||||
) -> None:
|
||||
"""Test that scheduled tasks are cancelled when disabled."""
|
||||
analytics = Analytics(hass)
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": True,
|
||||
"preferences": {ATTR_BASE: True, ATTR_SNAPSHOTS: True},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.async_schedule()
|
||||
|
||||
with patch(
|
||||
"homeassistant.helpers.storage.Store.async_load",
|
||||
return_value={
|
||||
"onboarded": onboarded,
|
||||
"preferences": {ATTR_BASE: False, ATTR_SNAPSHOTS: False},
|
||||
"uuid": "12345",
|
||||
},
|
||||
):
|
||||
await analytics.load()
|
||||
|
||||
await analytics.async_schedule()
|
||||
|
||||
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=25))
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert len(aioclient_mock.mock_calls) == 0
|
||||
|
||||
@@ -45,7 +45,6 @@ async def test_websocket(
|
||||
{"type": "analytics/preferences", "preferences": {"base": True}}
|
||||
)
|
||||
response = await ws_client.receive_json()
|
||||
assert len(aioclient_mock.mock_calls) == 1
|
||||
assert response["result"]["preferences"]["base"]
|
||||
|
||||
await ws_client.send_json_auto_id({"type": "analytics"})
|
||||
|
||||
@@ -1,4 +1,244 @@
|
||||
# serializer version: 1
|
||||
# name: test_deprecated_sensor_issue[apc-apc_deprecated]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'apc_deprecated_sensor.myups_status_data',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'apc_deprecated',
|
||||
'translation_placeholders': dict({
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_status_data',
|
||||
'entity_name': 'Status data',
|
||||
'items': '''
|
||||
- [APC UPS automation (apc)](/config/automation/edit/apcupsd_auto_apc)
|
||||
- [APC UPS script (apc)](/config/script/edit/apcupsd_script_apc)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[apcmodel-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_model',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'model',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_model',
|
||||
'entity_name': 'Model',
|
||||
'items': '''
|
||||
- [APC UPS automation (apcmodel)](/config/automation/edit/apcupsd_auto_apcmodel)
|
||||
- [APC UPS script (apcmodel)](/config/script/edit/apcupsd_script_apcmodel)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[date-date_deprecated]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'date_deprecated_sensor.myups_status_date',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'date_deprecated',
|
||||
'translation_placeholders': dict({
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_status_date',
|
||||
'entity_name': 'Status date',
|
||||
'items': '''
|
||||
- [APC UPS automation (date)](/config/automation/edit/apcupsd_auto_date)
|
||||
- [APC UPS script (date)](/config/script/edit/apcupsd_script_date)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[end apc-date_deprecated]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'date_deprecated_sensor.myups_date_and_time',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'date_deprecated',
|
||||
'translation_placeholders': dict({
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_date_and_time',
|
||||
'entity_name': 'Date and time',
|
||||
'items': '''
|
||||
- [APC UPS automation (end apc)](/config/automation/edit/apcupsd_auto_end_apc)
|
||||
- [APC UPS script (end apc)](/config/script/edit/apcupsd_script_end_apc)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[firmware-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_firmware_version',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'hw_version',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_firmware_version',
|
||||
'entity_name': 'Firmware version',
|
||||
'items': '''
|
||||
- [APC UPS automation (firmware)](/config/automation/edit/apcupsd_auto_firmware)
|
||||
- [APC UPS script (firmware)](/config/script/edit/apcupsd_script_firmware)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[model-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_model_2',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'model',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_model_2',
|
||||
'entity_name': 'Model',
|
||||
'items': '''
|
||||
- [APC UPS automation (model)](/config/automation/edit/apcupsd_auto_model)
|
||||
- [APC UPS script (model)](/config/script/edit/apcupsd_script_model)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[serialno-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_serial_number',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'serial_number',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_serial_number',
|
||||
'entity_name': 'Serial number',
|
||||
'items': '''
|
||||
- [APC UPS automation (serialno)](/config/automation/edit/apcupsd_auto_serialno)
|
||||
- [APC UPS script (serialno)](/config/script/edit/apcupsd_script_serialno)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[upsname-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_name',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'name',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_name',
|
||||
'entity_name': 'Name',
|
||||
'items': '''
|
||||
- [APC UPS automation (upsname)](/config/automation/edit/apcupsd_auto_upsname)
|
||||
- [APC UPS script (upsname)](/config/script/edit/apcupsd_script_upsname)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_deprecated_sensor_issue[version-available_via_device_info]
|
||||
IssueRegistryItemSnapshot({
|
||||
'active': True,
|
||||
'breaks_in_ha_version': '2026.6.0',
|
||||
'created': <ANY>,
|
||||
'data': None,
|
||||
'dismissed_version': None,
|
||||
'domain': 'apcupsd',
|
||||
'is_fixable': False,
|
||||
'is_persistent': False,
|
||||
'issue_domain': None,
|
||||
'issue_id': 'available_via_device_info_sensor.myups_daemon_version',
|
||||
'learn_more_url': None,
|
||||
'severity': <IssueSeverity.WARNING: 'warning'>,
|
||||
'translation_key': 'available_via_device_info',
|
||||
'translation_placeholders': dict({
|
||||
'available_via_device_attr': 'sw_version',
|
||||
'device_id': '<ANY>',
|
||||
'entity_id': 'sensor.myups_daemon_version',
|
||||
'entity_name': 'Daemon version',
|
||||
'items': '''
|
||||
- [APC UPS automation (version)](/config/automation/edit/apcupsd_auto_version)
|
||||
- [APC UPS script (version)](/config/script/edit/apcupsd_script_version)
|
||||
''',
|
||||
}),
|
||||
})
|
||||
# ---
|
||||
# name: test_sensor[sensor.myups_alarm_delay-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -6,7 +6,8 @@ from unittest.mock import AsyncMock
import pytest
from syrupy.assertion import SnapshotAssertion

from homeassistant.components.apcupsd.const import DOMAIN
from homeassistant.components import automation, script
from homeassistant.components.apcupsd.const import DEPRECATED_SENSORS, DOMAIN
from homeassistant.components.apcupsd.coordinator import REQUEST_REFRESH_COOLDOWN
from homeassistant.const import (
ATTR_ENTITY_ID,
@@ -15,7 +16,11 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import (
device_registry as dr,
entity_registry as er,
issue_registry as ir,
)
from homeassistant.setup import async_setup_component
from homeassistant.util import slugify
from homeassistant.util.dt import utcnow
@@ -161,3 +166,76 @@ async def test_sensor_unknown(
await hass.async_block_till_done()
# The state should become unknown again.
assert hass.states.get(last_self_test_id).state == STATE_UNKNOWN


@pytest.mark.parametrize(("entity_key", "issue_key"), DEPRECATED_SENSORS.items())
async def test_deprecated_sensor_issue(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_request_status: AsyncMock,
entity_registry: er.EntityRegistry,
snapshot: SnapshotAssertion,
entity_key: str,
issue_key: str,
) -> None:
"""Ensure the issue lists automations and scripts referencing a deprecated sensor."""
issue_registry = ir.async_get(hass)
unique_id = f"{mock_request_status.return_value['SERIALNO']}_{entity_key}"
entity_id = entity_registry.async_get_entity_id("sensor", DOMAIN, unique_id)
assert entity_id

# No issue yet.
issue_id = f"{issue_key}_{entity_id}"
assert issue_registry.async_get_issue(DOMAIN, issue_id) is None

# Add automations and scripts referencing the deprecated sensor.
entity_slug = slugify(entity_key)
automation_object_id = f"apcupsd_auto_{entity_slug}"
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: {
"id": automation_object_id,
"alias": f"APC UPS automation ({entity_key})",
"trigger": {"platform": "state", "entity_id": entity_id},
"action": {
"action": "automation.turn_on",
"target": {"entity_id": f"automation.{automation_object_id}"},
},
}
},
)

assert await async_setup_component(
hass,
script.DOMAIN,
{
script.DOMAIN: {
f"apcupsd_script_{entity_slug}": {
"alias": f"APC UPS script ({entity_key})",
"sequence": [
{
"condition": "state",
"entity_id": entity_id,
"state": "on",
}
],
}
}
},
)
await hass.config_entries.async_reload(mock_config_entry.entry_id)
await hass.async_block_till_done()

issue = issue_registry.async_get_issue(DOMAIN, issue_id)
# Redact the device ID in the placeholder for consistency.
issue.translation_placeholders["device_id"] = "<ANY>"
assert issue == snapshot

await hass.config_entries.async_unload(mock_config_entry.entry_id)
await hass.async_block_till_done()

# Assert the issue is no longer present.
assert not issue_registry.async_get_issue(DOMAIN, issue_id)
assert len(issue_registry.issues) == 0

@@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, Mock, patch

import pypck
from pypck import lcn_defs
from pypck.module import GroupConnection, ModuleConnection, Serials
from pypck.device import DeviceConnection, Serials
import pytest

from homeassistant.components.lcn import PchkConnectionManager
@@ -22,7 +22,7 @@ from tests.common import MockConfigEntry, load_fixture
LATEST_CONFIG_ENTRY_VERSION = (LcnFlowHandler.VERSION, LcnFlowHandler.MINOR_VERSION)


class MockModuleConnection(ModuleConnection):
class MockDeviceConnection(DeviceConnection):
"""Fake a LCN module connection."""

request_name = AsyncMock(return_value="TestModule")
@@ -49,12 +49,6 @@ class MockModuleConnection(ModuleConnection):
self._serials_known.set()


class MockGroupConnection(GroupConnection):
"""Fake a LCN group connection."""

send_command = AsyncMock(return_value=True)


class MockPchkConnectionManager(PchkConnectionManager):
"""Fake connection handler."""

@@ -67,15 +61,10 @@ class MockPchkConnectionManager(PchkConnectionManager):
async def async_close(self) -> None:
"""Mock closing a connection to PCHK."""

@patch.object(pypck.connection, "ModuleConnection", MockModuleConnection)
def get_module_conn(self, addr):
"""Get LCN module connection."""
return super().get_module_conn(addr)

@patch.object(pypck.connection, "GroupConnection", MockGroupConnection)
def get_group_conn(self, addr):
"""Get LCN group connection."""
return super().get_group_conn(addr)
@patch.object(pypck.connection, "DeviceConnection", MockDeviceConnection)
def get_device_connection(self, addr):
"""Get LCN device connection."""
return super().get_device_connection(addr)

scan_modules = AsyncMock()
send_command = AsyncMock()

@@ -29,7 +29,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import entity_registry as er

from .conftest import MockConfigEntry, MockModuleConnection, init_integration
from .conftest import MockConfigEntry, MockDeviceConnection, init_integration

from tests.common import snapshot_platform

@@ -51,7 +51,7 @@ async def test_set_hvac_mode_heat(hass: HomeAssistant, entry: MockConfigEntry) -
"""Test the hvac mode is set to heat."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
with patch.object(MockDeviceConnection, "lock_regulator") as lock_regulator:
await hass.services.async_call(
DOMAIN_CLIMATE,
SERVICE_SET_HVAC_MODE,
@@ -106,7 +106,7 @@ async def test_set_hvac_mode_off(hass: HomeAssistant, entry: MockConfigEntry) ->
"""Test the hvac mode is set off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
with patch.object(MockDeviceConnection, "lock_regulator") as lock_regulator:
state = hass.states.get("climate.testmodule_climate1")
state.state = HVACMode.HEAT

@@ -154,7 +154,7 @@ async def test_set_temperature(hass: HomeAssistant, entry: MockConfigEntry) -> N
"""Test the temperature is set."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "var_abs") as var_abs:
with patch.object(MockDeviceConnection, "var_abs") as var_abs:
state = hass.states.get("climate.testmodule_climate1")
state.state = HVACMode.HEAT

@@ -32,7 +32,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import MockConfigEntry, MockModuleConnection, init_integration
from .conftest import MockConfigEntry, MockDeviceConnection, init_integration

from tests.common import snapshot_platform

@@ -60,7 +60,7 @@ async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_outputs"
MockDeviceConnection, "control_motor_outputs"
) as control_motor_outputs:
state = hass.states.get(COVER_OUTPUTS)
assert state is not None
@@ -109,7 +109,7 @@ async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> Non
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_outputs"
MockDeviceConnection, "control_motor_outputs"
) as control_motor_outputs:
await hass.services.async_call(
DOMAIN_COVER,
@@ -161,7 +161,7 @@ async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_outputs"
MockDeviceConnection, "control_motor_outputs"
) as control_motor_outputs:
await hass.services.async_call(
DOMAIN_COVER,
@@ -209,7 +209,7 @@ async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None:
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_relays"
MockDeviceConnection, "control_motor_relays"
) as control_motor_relays:
state = hass.states.get(COVER_RELAYS)
assert state is not None
@@ -258,7 +258,7 @@ async def test_relays_close(hass: HomeAssistant, entry: MockConfigEntry) -> None
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_relays"
MockDeviceConnection, "control_motor_relays"
) as control_motor_relays:
await hass.services.async_call(
DOMAIN_COVER,
@@ -310,7 +310,7 @@ async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None:
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_relays"
MockDeviceConnection, "control_motor_relays"
) as control_motor_relays:
await hass.services.async_call(
DOMAIN_COVER,
@@ -375,7 +375,7 @@ async def test_relays_set_position(
await init_integration(hass, entry)

with patch.object(
MockModuleConnection, "control_motor_relays_position"
MockDeviceConnection, "control_motor_relays_position"
) as control_motor_relays_position:
state = hass.states.get(entity_id)
assert state is not None

@@ -25,7 +25,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import MockConfigEntry, MockModuleConnection, init_integration
from .conftest import MockConfigEntry, MockDeviceConnection, init_integration

from tests.common import snapshot_platform

@@ -51,7 +51,7 @@ async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> No
"""Test the output light turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "toggle_output") as toggle_output:
with patch.object(MockDeviceConnection, "toggle_output") as toggle_output:
# command failed
toggle_output.return_value = False

@@ -92,7 +92,7 @@ async def test_output_turn_on_with_attributes(
"""Test the output light turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dim_output") as dim_output:
with patch.object(MockDeviceConnection, "dim_output") as dim_output:
dim_output.return_value = True

await hass.services.async_call(
@@ -117,7 +117,7 @@ async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> N
"""Test the output light turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "toggle_output") as toggle_output:
with patch.object(MockDeviceConnection, "toggle_output") as toggle_output:
await hass.services.async_call(
DOMAIN_LIGHT,
SERVICE_TURN_ON,
@@ -163,7 +163,7 @@ async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> Non
"""Test the relay light turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_relays") as control_relays:
with patch.object(MockDeviceConnection, "control_relays") as control_relays:
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.ON

@@ -205,7 +205,7 @@ async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> No
"""Test the relay light turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_relays") as control_relays:
with patch.object(MockDeviceConnection, "control_relays") as control_relays:
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.OFF

@@ -15,7 +15,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import MockConfigEntry, MockModuleConnection, init_integration
from .conftest import MockConfigEntry, MockDeviceConnection, init_integration

from tests.common import snapshot_platform

@@ -39,7 +39,7 @@ async def test_scene_activate(
) -> None:
"""Test the scene is activated."""
await init_integration(hass, entry)
with patch.object(MockModuleConnection, "activate_scene") as activate_scene:
with patch.object(MockDeviceConnection, "activate_scene") as activate_scene:
await hass.services.async_call(
DOMAIN_SCENE,
SERVICE_TURN_ON,

@@ -35,7 +35,7 @@ from homeassistant.setup import async_setup_component

from .conftest import (
MockConfigEntry,
MockModuleConnection,
MockDeviceConnection,
get_device,
init_integration,
)
@@ -49,7 +49,7 @@ async def test_service_output_abs(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dim_output") as dim_output:
with patch.object(MockDeviceConnection, "dim_output") as dim_output:
await hass.services.async_call(
DOMAIN,
LcnService.OUTPUT_ABS,
@@ -73,7 +73,7 @@ async def test_service_output_rel(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "rel_output") as rel_output:
with patch.object(MockDeviceConnection, "rel_output") as rel_output:
await hass.services.async_call(
DOMAIN,
LcnService.OUTPUT_REL,
@@ -96,7 +96,7 @@ async def test_service_output_toggle(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "toggle_output") as toggle_output:
with patch.object(MockDeviceConnection, "toggle_output") as toggle_output:
await hass.services.async_call(
DOMAIN,
LcnService.OUTPUT_TOGGLE,
@@ -119,7 +119,7 @@ async def test_service_relays(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_relays") as control_relays:
with patch.object(MockDeviceConnection, "control_relays") as control_relays:
await hass.services.async_call(
DOMAIN,
LcnService.RELAYS,
@@ -137,7 +137,7 @@ async def test_service_relays(

# wrong states string
with (
patch.object(MockModuleConnection, "control_relays") as control_relays,
patch.object(MockDeviceConnection, "control_relays") as control_relays,
pytest.raises(HomeAssistantError) as exc_info,
):
await hass.services.async_call(
@@ -161,7 +161,7 @@ async def test_service_led(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_led") as control_led:
with patch.object(MockDeviceConnection, "control_led") as control_led:
await hass.services.async_call(
DOMAIN,
LcnService.LED,
@@ -187,7 +187,7 @@ async def test_service_var_abs(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "var_abs") as var_abs:
with patch.object(MockDeviceConnection, "var_abs") as var_abs:
await hass.services.async_call(
DOMAIN,
LcnService.VAR_ABS,
@@ -213,7 +213,7 @@ async def test_service_var_rel(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "var_rel") as var_rel:
with patch.object(MockDeviceConnection, "var_rel") as var_rel:
await hass.services.async_call(
DOMAIN,
LcnService.VAR_REL,
@@ -243,7 +243,7 @@ async def test_service_var_reset(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "var_reset") as var_reset:
with patch.object(MockDeviceConnection, "var_reset") as var_reset:
await hass.services.async_call(
DOMAIN,
LcnService.VAR_RESET,
@@ -265,7 +265,7 @@ async def test_service_lock_regulator(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
with patch.object(MockDeviceConnection, "lock_regulator") as lock_regulator:
await hass.services.async_call(
DOMAIN,
LcnService.LOCK_REGULATOR,
@@ -288,7 +288,7 @@ async def test_service_send_keys(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "send_keys") as send_keys:
with patch.object(MockDeviceConnection, "send_keys") as send_keys:
await hass.services.async_call(
DOMAIN,
LcnService.SEND_KEYS,
@@ -323,7 +323,7 @@ async def test_service_send_keys_hit_deferred(

# success
with patch.object(
MockModuleConnection, "send_keys_hit_deferred"
MockDeviceConnection, "send_keys_hit_deferred"
) as send_keys_hit_deferred:
await hass.services.async_call(
DOMAIN,
@@ -344,7 +344,7 @@ async def test_service_send_keys_hit_deferred(
# wrong key action
with (
patch.object(
MockModuleConnection, "send_keys_hit_deferred"
MockDeviceConnection, "send_keys_hit_deferred"
) as send_keys_hit_deferred,
pytest.raises(ServiceValidationError) as exc_info,
):
@@ -372,7 +372,7 @@ async def test_service_lock_keys(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_keys") as lock_keys:
with patch.object(MockDeviceConnection, "lock_keys") as lock_keys:
await hass.services.async_call(
DOMAIN,
LcnService.LOCK_KEYS,
@@ -391,7 +391,7 @@ async def test_service_lock_keys(

# wrong states string
with (
patch.object(MockModuleConnection, "lock_keys") as lock_keys,
patch.object(MockDeviceConnection, "lock_keys") as lock_keys,
pytest.raises(HomeAssistantError) as exc_info,
):
await hass.services.async_call(
@@ -418,7 +418,7 @@ async def test_service_lock_keys_tab_a_temporary(

# success
with patch.object(
MockModuleConnection, "lock_keys_tab_a_temporary"
MockDeviceConnection, "lock_keys_tab_a_temporary"
) as lock_keys_tab_a_temporary:
await hass.services.async_call(
DOMAIN,
@@ -442,7 +442,7 @@ async def test_service_lock_keys_tab_a_temporary(
# wrong table
with (
patch.object(
MockModuleConnection, "lock_keys_tab_a_temporary"
MockDeviceConnection, "lock_keys_tab_a_temporary"
) as lock_keys_tab_a_temporary,
pytest.raises(ServiceValidationError) as exc_info,
):
@@ -470,7 +470,7 @@ async def test_service_dyn_text(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dyn_text") as dyn_text:
with patch.object(MockDeviceConnection, "dyn_text") as dyn_text:
await hass.services.async_call(
DOMAIN,
LcnService.DYN_TEXT,
@@ -493,7 +493,7 @@ async def test_service_pck(
await async_setup_component(hass, "persistent_notification", {})
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "pck") as pck:
with patch.object(MockDeviceConnection, "pck") as pck:
await hass.services.async_call(
DOMAIN,
LcnService.PCK,

@@ -26,7 +26,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import MockConfigEntry, MockModuleConnection, init_integration
from .conftest import MockConfigEntry, MockDeviceConnection, init_integration

from tests.common import snapshot_platform

@@ -55,7 +55,7 @@ async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> No
"""Test the output switch turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dim_output") as dim_output:
with patch.object(MockDeviceConnection, "dim_output") as dim_output:
# command failed
dim_output.return_value = False

@@ -92,7 +92,7 @@ async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> N
"""Test the output switch turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "dim_output") as dim_output:
with patch.object(MockDeviceConnection, "dim_output") as dim_output:
await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
@@ -136,7 +136,7 @@ async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> Non
"""Test the relay switch turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_relays") as control_relays:
with patch.object(MockDeviceConnection, "control_relays") as control_relays:
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.ON

@@ -176,7 +176,7 @@ async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> No
"""Test the relay switch turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "control_relays") as control_relays:
with patch.object(MockDeviceConnection, "control_relays") as control_relays:
states = [RelayStateModifier.NOCHANGE] * 8
states[0] = RelayStateModifier.OFF

@@ -225,7 +225,7 @@ async def test_regulatorlock_turn_on(
"""Test the regulator lock switch turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
with patch.object(MockDeviceConnection, "lock_regulator") as lock_regulator:
# command failed
lock_regulator.return_value = False

@@ -264,7 +264,7 @@ async def test_regulatorlock_turn_off(
"""Test the regulator lock switch turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator:
with patch.object(MockDeviceConnection, "lock_regulator") as lock_regulator:
await hass.services.async_call(
DOMAIN_SWITCH,
SERVICE_TURN_ON,
@@ -308,7 +308,7 @@ async def test_keylock_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> N
"""Test the keylock switch turns on."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_keys") as lock_keys:
with patch.object(MockDeviceConnection, "lock_keys") as lock_keys:
states = [KeyLockStateModifier.NOCHANGE] * 8
states[0] = KeyLockStateModifier.ON

@@ -348,7 +348,7 @@ async def test_keylock_turn_off(hass: HomeAssistant, entry: MockConfigEntry) ->
"""Test the keylock switch turns off."""
await init_integration(hass, entry)

with patch.object(MockModuleConnection, "lock_keys") as lock_keys:
with patch.object(MockDeviceConnection, "lock_keys") as lock_keys:
states = [KeyLockStateModifier.NOCHANGE] * 8
states[0] = KeyLockStateModifier.OFF

@@ -118,7 +118,7 @@ async def test_lcn_devices_scan_command(
"""Test lcn/devices/scan command."""
# add new module which is not stored in config_entry
lcn_connection = await init_integration(hass, entry)
lcn_connection.get_address_conn(LcnAddr(0, 10, False))
lcn_connection.get_device_connection(LcnAddr(0, 10, False))

client = await hass_ws_client(hass)
await client.send_json_auto_id({**SCAN_PAYLOAD, "entry_id": entry.entry_id})

@@ -137,7 +137,7 @@ async def integration_fixture(
"switch_unit",
"tado_smart_radiator_thermostat_x",
"temperature_sensor",
"thermostat",
"longan_link_thermostat",
"vacuum_cleaner",
"valve",
"window_covering_full",

@@ -440,6 +440,55 @@
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[longan_link_thermostat][binary_sensor.longan_link_hvac_occupancy-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.OCCUPANCY: 'occupancy'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Occupancy',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatOccupancySensor-513-2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[longan_link_thermostat][binary_sensor.longan_link_hvac_occupancy-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'occupancy',
|
||||
'friendly_name': 'Longan link HVAC Occupancy',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[occupancy_sensor][binary_sensor.mock_occupancy_sensor_occupancy-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -1320,55 +1369,6 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[thermostat][binary_sensor.longan_link_hvac_occupancy-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <BinarySensorDeviceClass.OCCUPANCY: 'occupancy'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Occupancy',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatOccupancySensor-513-2',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[thermostat][binary_sensor.longan_link_hvac_occupancy-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'occupancy',
|
||||
'friendly_name': 'Longan link HVAC Occupancy',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.longan_link_hvac_occupancy',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensors[valve][binary_sensor.valve_general_fault-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -2000,6 +2000,55 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[longan_link_thermostat][button.longan_link_hvac_identify-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': <EntityCategory.CONFIG: 'config'>,
|
||||
'entity_id': 'button.longan_link_hvac_identify',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Identify',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-IdentifyButton-3-1',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[longan_link_thermostat][button.longan_link_hvac_identify-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'identify',
|
||||
'friendly_name': 'Longan link HVAC Identify',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'button.longan_link_hvac_identify',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[microwave_oven][button.microwave_oven_pause-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -3457,55 +3506,6 @@
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[thermostat][button.longan_link_hvac_identify-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': <EntityCategory.CONFIG: 'config'>,
|
||||
'entity_id': 'button.longan_link_hvac_identify',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Identify',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-IdentifyButton-3-1',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[thermostat][button.longan_link_hvac_identify-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'identify',
|
||||
'friendly_name': 'Longan link HVAC Identify',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'button.longan_link_hvac_identify',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[window_covering_pa_lift][button.longan_link_wncv_da01_identify-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -255,6 +255,76 @@
|
||||
'state': 'heat',
|
||||
})
|
||||
# ---
|
||||
# name: test_climates[longan_link_thermostat][climate.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
<HVACMode.COOL: 'cool'>,
|
||||
<HVACMode.HEAT_COOL: 'heat_cool'>,
|
||||
]),
|
||||
'max_temp': 35,
|
||||
'min_temp': 7,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'climate',
|
||||
'entity_category': None,
|
||||
'entity_id': 'climate.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <ClimateEntityFeature: 387>,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterThermostat-513-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_climates[longan_link_thermostat][climate.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 28.3,
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
<HVACMode.COOL: 'cool'>,
|
||||
<HVACMode.HEAT_COOL: 'heat_cool'>,
|
||||
]),
|
||||
'max_temp': 35,
|
||||
'min_temp': 7,
|
||||
'supported_features': <ClimateEntityFeature: 387>,
|
||||
'target_temp_high': None,
|
||||
'target_temp_low': None,
|
||||
'temperature': None,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'cool',
|
||||
})
|
||||
# ---
|
||||
# name: test_climates[room_airconditioner][climate.room_airconditioner-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -392,73 +462,3 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_climates[thermostat][climate.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
<HVACMode.COOL: 'cool'>,
|
||||
<HVACMode.HEAT_COOL: 'heat_cool'>,
|
||||
]),
|
||||
'max_temp': 35,
|
||||
'min_temp': 7,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'climate',
|
||||
'entity_category': None,
|
||||
'entity_id': 'climate.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <ClimateEntityFeature: 387>,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterThermostat-513-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_climates[thermostat][climate.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'current_temperature': 28.3,
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
'hvac_modes': list([
|
||||
<HVACMode.OFF: 'off'>,
|
||||
<HVACMode.HEAT: 'heat'>,
|
||||
<HVACMode.COOL: 'cool'>,
|
||||
<HVACMode.HEAT_COOL: 'heat_cool'>,
|
||||
]),
|
||||
'max_temp': 35,
|
||||
'min_temp': 7,
|
||||
'supported_features': <ClimateEntityFeature: 387>,
|
||||
'target_temp_high': None,
|
||||
'target_temp_low': None,
|
||||
'temperature': None,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'climate.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'cool',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -200,6 +200,69 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_fans[longan_link_thermostat][fan.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'preset_modes': list([
|
||||
'low',
|
||||
'medium',
|
||||
'high',
|
||||
'auto',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'fan',
|
||||
'entity_category': None,
|
||||
'entity_id': 'fan.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <FanEntityFeature: 56>,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterFan-514-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_fans[longan_link_thermostat][fan.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
'preset_mode': None,
|
||||
'preset_modes': list([
|
||||
'low',
|
||||
'medium',
|
||||
'high',
|
||||
'auto',
|
||||
]),
|
||||
'supported_features': <FanEntityFeature: 56>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'fan.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_fans[room_airconditioner][fan.room_airconditioner-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -267,66 +330,3 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_fans[thermostat][fan.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'preset_modes': list([
|
||||
'low',
|
||||
'medium',
|
||||
'high',
|
||||
'auto',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'fan',
|
||||
'entity_category': None,
|
||||
'entity_id': 'fan.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': <FanEntityFeature: 56>,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterFan-514-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_fans[thermostat][fan.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
'preset_mode': None,
|
||||
'preset_modes': list([
|
||||
'low',
|
||||
'medium',
|
||||
'high',
|
||||
'auto',
|
||||
]),
|
||||
'supported_features': <FanEntityFeature: 56>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'fan.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
|
||||
@@ -2127,6 +2127,63 @@
|
||||
'state': 'Low',
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[longan_link_thermostat][select.longan_link_hvac_temperature_display_mode-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'Celsius',
|
||||
'Fahrenheit',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'select',
|
||||
'entity_category': <EntityCategory.CONFIG: 'config'>,
|
||||
'entity_id': 'select.longan_link_hvac_temperature_display_mode',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Temperature display mode',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'temperature_display_mode',
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-TrvTemperatureDisplayMode-516-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[longan_link_thermostat][select.longan_link_hvac_temperature_display_mode-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Longan link HVAC Temperature display mode',
|
||||
'options': list([
|
||||
'Celsius',
|
||||
'Fahrenheit',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'select.longan_link_hvac_temperature_display_mode',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'Celsius',
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[microwave_oven][select.microwave_oven_power_level_w-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -3826,63 +3883,6 @@
|
||||
'state': 'Quick',
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[thermostat][select.longan_link_hvac_temperature_display_mode-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'options': list([
|
||||
'Celsius',
|
||||
'Fahrenheit',
|
||||
]),
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'select',
|
||||
'entity_category': <EntityCategory.CONFIG: 'config'>,
|
||||
'entity_id': 'select.longan_link_hvac_temperature_display_mode',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Temperature display mode',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'temperature_display_mode',
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-TrvTemperatureDisplayMode-516-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[thermostat][select.longan_link_hvac_temperature_display_mode-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'Longan link HVAC Temperature display mode',
|
||||
'options': list([
|
||||
'Celsius',
|
||||
'Fahrenheit',
|
||||
]),
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'select.longan_link_hvac_temperature_display_mode',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'Celsius',
|
||||
})
|
||||
# ---
|
||||
# name: test_selects[vacuum_cleaner][select.mock_vacuum_clean_mode-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -6974,6 +6974,118 @@
|
||||
'state': '1.3',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[longan_link_thermostat][sensor.longan_link_hvac_outdoor_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.longan_link_hvac_outdoor_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Outdoor temperature',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'outdoor_temperature',
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatOutdoorTemperature-513-1',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[longan_link_thermostat][sensor.longan_link_hvac_outdoor_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Longan link HVAC Outdoor temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.longan_link_hvac_outdoor_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '12.5',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[longan_link_thermostat][sensor.longan_link_hvac_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.longan_link_hvac_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Temperature',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatLocalTemperature-513-0',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[longan_link_thermostat][sensor.longan_link_hvac_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Longan link HVAC Temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.longan_link_hvac_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '28.3',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[microwave_oven][sensor.microwave_oven_estimated_end_time-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -10451,118 +10563,6 @@
|
||||
'state': '21.0',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[thermostat][sensor.longan_link_hvac_outdoor_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.longan_link_hvac_outdoor_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Outdoor temperature',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'outdoor_temperature',
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatOutdoorTemperature-513-1',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[thermostat][sensor.longan_link_hvac_outdoor_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Longan link HVAC Outdoor temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.longan_link_hvac_outdoor_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '12.5',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[thermostat][sensor.longan_link_hvac_temperature-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': dict({
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
}),
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.longan_link_hvac_temperature',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
'sensor': dict({
|
||||
'suggested_display_precision': 1,
|
||||
}),
|
||||
}),
|
||||
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
|
||||
'original_icon': None,
|
||||
'original_name': 'Temperature',
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-ThermostatLocalTemperature-513-0',
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[thermostat][sensor.longan_link_hvac_temperature-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'temperature',
|
||||
'friendly_name': 'Longan link HVAC Temperature',
|
||||
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
|
||||
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.longan_link_hvac_temperature',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': '28.3',
|
||||
})
|
||||
# ---
|
||||
# name: test_sensors[vacuum_cleaner][sensor.mock_vacuum_estimated_end_time-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -633,6 +633,55 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[longan_link_thermostat][switch.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SwitchDeviceClass.OUTLET: 'outlet'>,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterSwitch-6-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[longan_link_thermostat][switch.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'outlet',
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[on_off_plugin_unit][switch.mock_onoffpluginunit-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
@@ -1072,55 +1121,6 @@
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[thermostat][switch.longan_link_hvac-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'switch',
|
||||
'entity_category': None,
|
||||
'entity_id': 'switch.longan_link_hvac',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': <SwitchDeviceClass.OUTLET: 'outlet'>,
|
||||
'original_icon': None,
|
||||
'original_name': None,
|
||||
'platform': 'matter',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '00000000000004D2-0000000000000004-MatterNodeDevice-1-MatterSwitch-6-0',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[thermostat][switch.longan_link_hvac-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'device_class': 'outlet',
|
||||
'friendly_name': 'Longan link HVAC',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'switch.longan_link_hvac',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'on',
|
||||
})
|
||||
# ---
|
||||
# name: test_switches[yandex_smart_socket][switch.yndx_00540-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
|
||||
@@ -388,7 +388,7 @@ async def test_water_valve(
    assert state.state == "on"


@pytest.mark.parametrize("node_fixture", ["thermostat"])
@pytest.mark.parametrize("node_fixture", ["longan_link_thermostat"])
async def test_thermostat_occupancy(
    hass: HomeAssistant,
    matter_client: MagicMock,

@@ -30,7 +30,7 @@ async def test_climates(
    snapshot_matter_entities(hass, entity_registry, snapshot, Platform.CLIMATE)


@pytest.mark.parametrize("node_fixture", ["thermostat"])
@pytest.mark.parametrize("node_fixture", ["longan_link_thermostat"])
async def test_thermostat_base(
    hass: HomeAssistant,
    matter_client: MagicMock,
@@ -162,7 +162,7 @@ async def test_thermostat_base(
    assert state.attributes["temperature"] == 20


@pytest.mark.parametrize("node_fixture", ["thermostat"])
@pytest.mark.parametrize("node_fixture", ["longan_link_thermostat"])
async def test_thermostat_humidity(
    hass: HomeAssistant,
    matter_client: MagicMock,
@@ -215,7 +215,7 @@ async def test_thermostat_humidity(
    assert "current_humidity" not in state.attributes


@pytest.mark.parametrize("node_fixture", ["thermostat"])
@pytest.mark.parametrize("node_fixture", ["longan_link_thermostat"])
async def test_thermostat_service_calls(
    hass: HomeAssistant,
    matter_client: MagicMock,

@@ -233,7 +233,7 @@ async def test_eve_thermo_sensor(
    assert state.state == "18.0"


@pytest.mark.parametrize("node_fixture", ["thermostat"])
@pytest.mark.parametrize("node_fixture", ["longan_link_thermostat"])
async def test_thermostat_outdoor(
    hass: HomeAssistant,
    matter_client: MagicMock,

tests/components/mobile_app/test_pending_updates.py (new file, 622 additions)
@@ -0,0 +1,622 @@
|
||||
"""Tests for mobile_app pending updates functionality."""
|
||||
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from aiohttp.test_utils import TestClient
|
||||
|
||||
from homeassistant.const import PERCENTAGE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
|
||||
async def test_pending_update_applied_when_entity_enabled(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that updates sent while disabled are applied when entity is re-enabled."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
"unit_of_measurement": PERCENTAGE,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "100"
|
||||
|
||||
# Disable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send update while disabled
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 50,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
"unit_of_measurement": PERCENTAGE,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable the entity
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_state", disabled_by=None)
|
||||
|
||||
# Reload the config entry to trigger entity re-creation
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify the update sent while disabled was applied
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "50"
|
||||
|
||||
|
||||
async def test_pending_update_with_attributes(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that pending updates preserve all attributes."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
"attributes": {"charging": True, "voltage": 4.2},
|
||||
"icon": "mdi:battery-charging",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Disable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send update with different attributes while disabled
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 50,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
"attributes": {"charging": False, "voltage": 3.7},
|
||||
"icon": "mdi:battery-50",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable the entity
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_state", disabled_by=None)
|
||||
|
||||
# Reload the config entry
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify all attributes were applied
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "50"
|
||||
assert entity.attributes["charging"] is False
|
||||
assert entity.attributes["voltage"] == 3.7
|
||||
assert entity.attributes["icon"] == "mdi:battery-50"
|
||||
|
||||
|
||||
async def test_pending_update_overwritten_by_newer_update(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that newer pending updates overwrite older ones."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Disable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send first update while disabled
|
||||
await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 75,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send second update while still disabled - should overwrite
|
||||
await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 25,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable the entity
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_state", disabled_by=None)
|
||||
|
||||
# Reload the config entry
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify the latest update was applied (25, not 75)
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "25"
|
||||
|
||||
|
||||
async def test_pending_update_not_stored_on_enabled_entities(
|
||||
hass: HomeAssistant,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that enabled entities receive updates immediately."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "100"
|
||||
|
||||
# Send update while enabled - should apply immediately
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 50,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify update was applied immediately
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "50"
|
||||
|
||||
|
||||
async def test_pending_update_fallback_to_restore_state(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that restored state is used when no pending update exists."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "100"
|
||||
|
||||
# Update to a new state
|
||||
await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "update_sensor_states",
|
||||
"data": [
|
||||
{
|
||||
"state": 75,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "75"
|
||||
|
||||
# Reload without pending updates
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify restored state was used
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "75"
|
||||
|
||||
|
||||
async def test_multiple_pending_updates_for_different_sensors(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that multiple sensors can be updated while disabled and applied when re-enabled."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register two sensors
|
||||
for unique_id, state in (("battery_state", 100), ("battery_temp", 25)):
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": unique_id.replace("_", " ").title(),
|
||||
"state": state,
|
||||
"type": "sensor",
|
||||
"unique_id": unique_id,
|
||||
},
|
||||
},
|
||||
)
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Disable both entities
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_temp", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send updates for both while disabled
|
||||
await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 50,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery Temp",
|
||||
"state": 30,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_temp",
|
||||
},
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable both entities
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_state", disabled_by=None)
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_temp", disabled_by=None)
|
||||
|
||||
# Reload the config entry
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify both updates sent while disabled were applied
|
||||
battery_state = hass.states.get("sensor.test_1_battery_state")
|
||||
battery_temp = hass.states.get("sensor.test_1_battery_temp")
|
||||
|
||||
assert battery_state is not None
|
||||
assert battery_state.state == "50"
|
||||
assert battery_temp is not None
|
||||
assert battery_temp.state == "30"
|
||||
|
||||
|
||||
async def test_update_sensor_states_with_pending_updates(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that update_sensor_states updates are applied when entity is re-enabled."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
"unit_of_measurement": PERCENTAGE,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "100"
|
||||
|
||||
# Disable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"sensor.test_1_battery_state", disabled_by=er.RegistryEntryDisabler.USER
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Use update_sensor_states while disabled
|
||||
resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "update_sensor_states",
|
||||
"data": [
|
||||
{
|
||||
"state": 75,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable the entity
|
||||
entity_registry.async_update_entity("sensor.test_1_battery_state", disabled_by=None)
|
||||
|
||||
# Reload the config entry to trigger entity re-creation
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify the update sent while disabled was applied
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "75"
|
||||
|
||||
|
||||
async def test_update_sensor_states_always_stores_pending(
|
||||
hass: HomeAssistant,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that update_sensor_states applies updates to enabled entities."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Battery State",
|
||||
"state": 100,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "100"
|
||||
|
||||
# Use update_sensor_states while enabled
|
||||
resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "update_sensor_states",
|
||||
"data": [
|
||||
{
|
||||
"state": 50,
|
||||
"type": "sensor",
|
||||
"unique_id": "battery_state",
|
||||
}
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify update was applied
|
||||
entity = hass.states.get("sensor.test_1_battery_state")
|
||||
assert entity is not None
|
||||
assert entity.state == "50"
|
||||
|
||||
|
||||
async def test_binary_sensor_pending_update(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
create_registrations: tuple[dict[str, Any], dict[str, Any]],
|
||||
webhook_client: TestClient,
|
||||
) -> None:
|
||||
"""Test that binary sensor updates are applied when entity is re-enabled."""
|
||||
webhook_id = create_registrations[1]["webhook_id"]
|
||||
webhook_url = f"/api/webhook/{webhook_id}"
|
||||
|
||||
# Register a binary sensor
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Motion Detected",
|
||||
"state": False,
|
||||
"type": "binary_sensor",
|
||||
"unique_id": "motion_sensor",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity = hass.states.get("binary_sensor.test_1_motion_detected")
|
||||
assert entity is not None
|
||||
assert entity.state == "off"
|
||||
|
||||
# Disable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"binary_sensor.test_1_motion_detected",
|
||||
disabled_by=er.RegistryEntryDisabler.USER,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Send update while disabled
|
||||
reg_resp = await webhook_client.post(
|
||||
webhook_url,
|
||||
json={
|
||||
"type": "register_sensor",
|
||||
"data": {
|
||||
"name": "Motion Detected",
|
||||
"state": True,
|
||||
"type": "binary_sensor",
|
||||
"unique_id": "motion_sensor",
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
assert reg_resp.status == HTTPStatus.CREATED
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Re-enable the entity
|
||||
entity_registry.async_update_entity(
|
||||
"binary_sensor.test_1_motion_detected", disabled_by=None
|
||||
)
|
||||
|
||||
# Reload the config entry
|
||||
config_entry = hass.config_entries.async_entries("mobile_app")[1]
|
||||
await hass.config_entries.async_reload(config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# Verify the update sent while disabled was applied
|
||||
entity = hass.states.get("binary_sensor.test_1_motion_detected")
|
||||
assert entity is not None
|
||||
assert entity.state == "on"
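For orientation, a minimal sketch of the pending-update idea these tests exercise, with invented names; this is not the integration's actual code, only an illustration of the behavior described in the docstrings above.

# Sketch only: keep the newest webhook payload per unique_id while the entity is
# disabled, and hand it back when the entity is re-created after being enabled again.
# Names here (handle_sensor_update, pop_pending) are hypothetical.
from typing import Any

_pending: dict[str, dict[str, Any]] = {}


def handle_sensor_update(unique_id: str, payload: dict[str, Any], enabled: bool) -> dict[str, Any] | None:
    """Return the payload to apply now, or store it while the entity is disabled."""
    if enabled:
        return payload
    _pending[unique_id] = payload  # a newer update overwrites an older one
    return None


def pop_pending(unique_id: str) -> dict[str, Any] | None:
    """On re-enable (e.g. after a config entry reload), fetch the stored update, if any."""
    return _pending.pop(unique_id, None)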
@@ -30,8 +30,7 @@ from homeassistant.components.media_player import (
    SERVICE_UNJOIN,
    MediaPlayerEntityFeature,
)
from homeassistant.components.music_assistant.const import DOMAIN
from homeassistant.components.music_assistant.media_player import (
from homeassistant.components.music_assistant.const import (
    ATTR_ALBUM,
    ATTR_ANNOUNCE_VOLUME,
    ATTR_ARTIST,
@@ -42,6 +41,9 @@ from homeassistant.components.music_assistant.media_player import (
    ATTR_SOURCE_PLAYER,
    ATTR_URL,
    ATTR_USE_PRE_ANNOUNCE,
    DOMAIN,
)
from homeassistant.components.music_assistant.services import (
    SERVICE_GET_QUEUE,
    SERVICE_PLAY_ANNOUNCEMENT,
    SERVICE_PLAY_MEDIA_ADVANCED,
@@ -56,6 +56,21 @@ def mock_nsapi() -> Generator[AsyncMock]:
        yield client


@pytest.fixture
def mock_single_trip_nsapi(mock_nsapi: AsyncMock) -> AsyncMock:
    """Mock the NS API client to return the single trip from the trip_single.json fixture."""
    trips_data = load_json_object_fixture("trip_single.json", DOMAIN)
    mock_nsapi.get_trips.return_value = [Trip(trip) for trip in trips_data["trips"]]
    return mock_nsapi


@pytest.fixture
def mock_no_trips_nsapi(mock_nsapi: AsyncMock) -> AsyncMock:
    """Mock the NS API client to return no trips."""
    mock_nsapi.get_trips.return_value = []
    return mock_nsapi


@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Mock config entry."""
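A possible usage of the new fixtures, shown as a hypothetical test for illustration only: requesting mock_no_trips_nsapi exercises the empty-trips path without the test building its own mock.

# Hypothetical example, not part of the diff: the fixture wires up the mocked
# NS API client, so the test body only needs to set up the config entry.
import pytest

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


@pytest.mark.usefixtures("mock_no_trips_nsapi")
async def test_example_no_trips(hass: HomeAssistant, mock_config_entry: MockConfigEntry) -> None:
    """Sketch: set up the entry while get_trips returns an empty list."""
    mock_config_entry.add_to_hass(hass)
    assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()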
@@ -0,0 +1,856 @@
|
||||
{
|
||||
"source": "HARP",
|
||||
"trips": [
|
||||
{
|
||||
"idx": 2,
|
||||
"uid": "arnu|fromStation=8400058|requestedFromStation=8400058|toStation=8400530|requestedToStation=8400530|viaStation=8400319|plannedFromTime=2025-09-15T16:34:00+02:00|plannedArrivalTime=2025-09-15T18:45:00+02:00|excludeHighSpeedTrains=false|searchForAccessibleTrip=false|localTrainsOnly=false|disabledTransportModalities=BUS,FERRY,TRAM,METRO|travelAssistance=false|tripSummaryHash=1596512355",
|
||||
"ctxRecon": "arnu|fromStation=8400058|requestedFromStation=8400058|toStation=8400530|requestedToStation=8400530|viaStation=8400319|plannedFromTime=2025-09-15T16:34:00+02:00|plannedArrivalTime=2025-09-15T18:45:00+02:00|excludeHighSpeedTrains=false|searchForAccessibleTrip=false|localTrainsOnly=false|disabledTransportModalities=BUS,FERRY,TRAM,METRO|travelAssistance=false|tripSummaryHash=1596512355",
|
||||
"sourceCtxRecon": "¶HKI¶T$A=1@O=Amsterdam Centraal@L=1100836@a=128@$A=1@O='s-Hertogenbosch@L=1100870@a=128@$202509151634$202509151731$IC 2761 $$1$$$$$$§W$A=1@O='s-Hertogenbosch@L=1100870@a=128@$A=1@O='s-Hertogenbosch@L=1101751@a=128@$202509151731$202509151733$$$1$$$$$$§T$A=1@O='s-Hertogenbosch@L=1101751@a=128@$A=1@O=Breda@L=1101034@a=128@$202509151741$202509151810$IC 3661 $$3$$$$$$§W$A=1@O=Breda@L=1101034@a=128@$A=1@O=Breda@L=1100942@a=128@$202509151810$202509151812$$$1$$$$$$§T$A=1@O=Breda@L=1100942@a=128@$A=1@O=Rotterdam Centraal@L=1100668@a=128@$202509151823$202509151845$IC 1162 $$1$$$$$$¶KC¶#VE#2#CF#100#CA#0#CM#0#SICT#0#AM#16465#AM2#0#RT#31#¶KCC¶#VE#0#ERG#45317#HIN#390#ECK#13954|13954|14077|14085|0|0|485|13938|3|0|8|0|0|-2147483648#¶KRCC¶#VE#1#MRTF#",
|
||||
"plannedDurationInMinutes": 131,
|
||||
"actualDurationInMinutes": 130,
|
||||
"transfers": 2,
|
||||
"status": "NORMAL",
|
||||
"messages": [],
|
||||
"legs": [
|
||||
{
|
||||
"idx": "0",
|
||||
"name": "IC 2761",
|
||||
"travelType": "PUBLIC_TRANSIT",
|
||||
"direction": "Maastricht",
|
||||
"partCancelled": false,
|
||||
"cancelled": false,
|
||||
"isAfterCancelledLeg": false,
|
||||
"isOnOrAfterCancelledLeg": false,
|
||||
"changePossible": true,
|
||||
"alternativeTransport": false,
|
||||
"journeyDetailRef": "HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#1088#TA#0#DA#150925#1S#1101009#1T#1557#LS#1101011#LT#1903#PU#784#RT#1#CA#IC#ZE#2761#ZB#IC 2761 #PC#1#FR#1101009#FT#1557#TO#1101011#TT#1903#",
|
||||
"origin": {
|
||||
"name": "Amsterdam Centraal",
|
||||
"lng": 4.90027761459351,
|
||||
"lat": 52.3788871765137,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400058",
|
||||
"uicCdCode": "118400058",
|
||||
"stationCode": "ASD",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T16:34:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T16:35:00+0200",
|
||||
"plannedTrack": "4",
|
||||
"actualTrack": "4",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"destination": {
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lng": 5.29362,
|
||||
"lat": 51.69048,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"stationCode": "HT",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T17:31:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T17:31:00+0200",
|
||||
"plannedTrack": "6",
|
||||
"actualTrack": "6",
|
||||
"exitSide": "RIGHT",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"product": {
|
||||
"productType": "Product",
|
||||
"number": "2761",
|
||||
"categoryCode": "IC",
|
||||
"shortCategoryName": "IC",
|
||||
"longCategoryName": "Intercity",
|
||||
"operatorCode": "NS",
|
||||
"operatorName": "NS",
|
||||
"operatorAdministrativeCode": 100,
|
||||
"type": "TRAIN",
|
||||
"displayName": "NS Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-body"
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"notes": [
|
||||
[
|
||||
{
|
||||
"value": "NS Intercity",
|
||||
"shortValue": "NS Intercity",
|
||||
"accessibilityValue": "NS Intercity",
|
||||
"key": "PRODUCT_NAME",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "richting Maastricht",
|
||||
"shortValue": "richting Maastricht",
|
||||
"accessibilityValue": "richting Maastricht",
|
||||
"key": "PRODUCT_DIRECTION",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "2 tussenstops",
|
||||
"shortValue": "2 tussenstops",
|
||||
"accessibilityValue": "2 tussenstops",
|
||||
"key": "PRODUCT_INTERMEDIATE_STOPS",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"stops": [
|
||||
{
|
||||
"uicCode": "8400058",
|
||||
"uicCdCode": "118400058",
|
||||
"name": "Amsterdam Centraal",
|
||||
"lat": 52.3788871765137,
|
||||
"lng": 4.90027761459351,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 0,
|
||||
"plannedDepartureDateTime": "2025-09-15T16:34:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T16:35:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "4",
|
||||
"plannedDepartureTrack": "4",
|
||||
"plannedArrivalTrack": "4",
|
||||
"actualArrivalTrack": "4",
|
||||
"departureDelayInSeconds": 60,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400057",
|
||||
"uicCdCode": "118400057",
|
||||
"name": "Amsterdam Amstel",
|
||||
"lat": 52.3466682434082,
|
||||
"lng": 4.91777801513672,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 2,
|
||||
"plannedDepartureDateTime": "2025-09-15T16:42:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T16:43:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"plannedArrivalDateTime": "2025-09-15T16:42:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T16:43:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "4",
|
||||
"plannedDepartureTrack": "4",
|
||||
"plannedArrivalTrack": "4",
|
||||
"actualArrivalTrack": "4",
|
||||
"departureDelayInSeconds": 60,
|
||||
"arrivalDelayInSeconds": 60,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400621",
|
||||
"uicCdCode": "118400621",
|
||||
"name": "Utrecht Centraal",
|
||||
"lat": 52.0888900756836,
|
||||
"lng": 5.11027765274048,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 10,
|
||||
"plannedDepartureDateTime": "2025-09-15T17:03:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T17:03:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"plannedArrivalDateTime": "2025-09-15T17:00:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T17:00:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "15",
|
||||
"plannedDepartureTrack": "15",
|
||||
"plannedArrivalTrack": "15",
|
||||
"actualArrivalTrack": "15",
|
||||
"departureDelayInSeconds": 0,
|
||||
"arrivalDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lat": 51.69048,
|
||||
"lng": 5.29362,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 18,
|
||||
"plannedArrivalDateTime": "2025-09-15T17:31:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T17:31:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "6",
|
||||
"plannedDepartureTrack": "6",
|
||||
"plannedArrivalTrack": "6",
|
||||
"actualArrivalTrack": "6",
|
||||
"arrivalDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
}
|
||||
],
|
||||
"crowdForecast": "MEDIUM",
|
||||
"bicycleSpotCount": 6,
|
||||
"crossPlatformTransfer": true,
|
||||
"shorterStock": false,
|
||||
"journeyDetail": [
|
||||
{
|
||||
"type": "TRAIN_XML",
|
||||
"link": {
|
||||
"uri": "/api/v2/journey?id=HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#1088#TA#0#DA#150925#1S#1101009#1T#1557#LS#1101011#LT#1903#PU#784#RT#1#CA#IC#ZE#2761#ZB#IC 2761 #PC#1#FR#1101009#FT#1557#TO#1101011#TT#1903#&train=2761&datetime=2025-09-15T16:34:00+02:00"
|
||||
}
|
||||
}
|
||||
],
|
||||
"reachable": true,
|
||||
"plannedDurationInMinutes": 57,
|
||||
"nesProperties": {
|
||||
"color": "text-info",
|
||||
"scope": "LEG_LINE",
|
||||
"styles": {
|
||||
"type": "LineStyles",
|
||||
"dashed": false
|
||||
}
|
||||
},
|
||||
"duration": {
|
||||
"value": "56 min.",
|
||||
"accessibilityValue": "56 minuten",
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
},
|
||||
"preSteps": [],
|
||||
"postSteps": [],
|
||||
"transferTimeToNextLeg": 2,
|
||||
"distanceInMeters": 84795
|
||||
},
|
||||
{
|
||||
"idx": "1",
|
||||
"name": "IC 3661",
|
||||
"travelType": "PUBLIC_TRANSIT",
|
||||
"direction": "Roosendaal",
|
||||
"partCancelled": false,
|
||||
"cancelled": false,
|
||||
"isAfterCancelledLeg": false,
|
||||
"isOnOrAfterCancelledLeg": false,
|
||||
"changePossible": true,
|
||||
"alternativeTransport": false,
|
||||
"journeyDetailRef": "HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#505945#TA#0#DA#150925#1S#1101167#1T#1550#LS#1101102#LT#1833#PU#784#RT#3#CA#IC#ZE#3661#ZB#IC 3661 #PC#1#FR#1101167#FT#1550#TO#1101102#TT#1833#",
|
||||
"origin": {
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lng": 5.29362,
|
||||
"lat": 51.69048,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"stationCode": "HT",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T17:41:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T17:41:00+0200",
|
||||
"plannedTrack": "7",
|
||||
"actualTrack": "7",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"destination": {
|
||||
"name": "Breda",
|
||||
"lng": 4.78000020980835,
|
||||
"lat": 51.5955543518066,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400131",
|
||||
"uicCdCode": "118400131",
|
||||
"stationCode": "BD",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T18:10:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T18:10:00+0200",
|
||||
"plannedTrack": "8",
|
||||
"actualTrack": "8",
|
||||
"exitSide": "LEFT",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"product": {
|
||||
"productType": "Product",
|
||||
"number": "3661",
|
||||
"categoryCode": "IC",
|
||||
"shortCategoryName": "IC",
|
||||
"longCategoryName": "Intercity",
|
||||
"operatorCode": "NS",
|
||||
"operatorName": "NS",
|
||||
"operatorAdministrativeCode": 100,
|
||||
"type": "TRAIN",
|
||||
"displayName": "NS Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-body"
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"notes": [
|
||||
[
|
||||
{
|
||||
"value": "NS Intercity",
|
||||
"shortValue": "NS Intercity",
|
||||
"accessibilityValue": "NS Intercity",
|
||||
"key": "PRODUCT_NAME",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "richting Roosendaal",
|
||||
"shortValue": "richting Roosendaal",
|
||||
"accessibilityValue": "richting Roosendaal",
|
||||
"key": "PRODUCT_DIRECTION",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "1 tussenstop",
|
||||
"shortValue": "1 tussenstop",
|
||||
"accessibilityValue": "1 tussenstop",
|
||||
"key": "PRODUCT_INTERMEDIATE_STOPS",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"transferMessages": [
|
||||
{
|
||||
"message": "Overstap op zelfde perron",
|
||||
"accessibilityMessage": "Overstap op zelfde perron",
|
||||
"type": "CROSS_PLATFORM",
|
||||
"messageNesProperties": {
|
||||
"color": "text-default",
|
||||
"type": "informative"
|
||||
}
|
||||
}
|
||||
],
|
||||
"stops": [
|
||||
{
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lat": 51.69048,
|
||||
"lng": 5.29362,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 0,
|
||||
"plannedDepartureDateTime": "2025-09-15T17:41:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T17:41:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "7",
|
||||
"plannedDepartureTrack": "7",
|
||||
"plannedArrivalTrack": "7",
|
||||
"actualArrivalTrack": "7",
|
||||
"departureDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400597",
|
||||
"uicCdCode": "118400597",
|
||||
"name": "Tilburg",
|
||||
"lat": 51.5605545043945,
|
||||
"lng": 5.08361101150513,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 1,
|
||||
"plannedDepartureDateTime": "2025-09-15T17:58:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T17:58:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"plannedArrivalDateTime": "2025-09-15T17:56:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T17:56:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "3",
|
||||
"plannedDepartureTrack": "3",
|
||||
"plannedArrivalTrack": "3",
|
||||
"actualArrivalTrack": "3",
|
||||
"departureDelayInSeconds": 0,
|
||||
"arrivalDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400131",
|
||||
"uicCdCode": "118400131",
|
||||
"name": "Breda",
|
||||
"lat": 51.5955543518066,
|
||||
"lng": 4.78000020980835,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 5,
|
||||
"plannedArrivalDateTime": "2025-09-15T18:10:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T18:10:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "8",
|
||||
"plannedDepartureTrack": "8",
|
||||
"plannedArrivalTrack": "8",
|
||||
"actualArrivalTrack": "8",
|
||||
"arrivalDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
}
|
||||
],
|
||||
"crowdForecast": "MEDIUM",
|
||||
"punctuality": 58.3,
|
||||
"crossPlatformTransfer": true,
|
||||
"shorterStock": false,
|
||||
"journeyDetail": [
|
||||
{
|
||||
"type": "TRAIN_XML",
|
||||
"link": {
|
||||
"uri": "/api/v2/journey?id=HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#505945#TA#0#DA#150925#1S#1101167#1T#1550#LS#1101102#LT#1833#PU#784#RT#3#CA#IC#ZE#3661#ZB#IC 3661 #PC#1#FR#1101167#FT#1550#TO#1101102#TT#1833#&train=3661&datetime=2025-09-15T17:41:00+02:00"
|
||||
}
|
||||
}
|
||||
],
|
||||
"reachable": true,
|
||||
"plannedDurationInMinutes": 29,
|
||||
"nesProperties": {
|
||||
"color": "text-info",
|
||||
"scope": "LEG_LINE",
|
||||
"styles": {
|
||||
"type": "LineStyles",
|
||||
"dashed": false
|
||||
}
|
||||
},
|
||||
"duration": {
|
||||
"value": "29 min.",
|
||||
"accessibilityValue": "29 minuten",
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
},
|
||||
"preSteps": [],
|
||||
"postSteps": [],
|
||||
"transferTimeToNextLeg": 2,
|
||||
"distanceInMeters": 41871
|
||||
},
|
||||
{
|
||||
"idx": "2",
|
||||
"name": "IC 1162",
|
||||
"travelType": "PUBLIC_TRANSIT",
|
||||
"direction": "Den Haag Centraal",
|
||||
"partCancelled": false,
|
||||
"cancelled": false,
|
||||
"isAfterCancelledLeg": false,
|
||||
"isOnOrAfterCancelledLeg": false,
|
||||
"changePossible": true,
|
||||
"alternativeTransport": false,
|
||||
"journeyDetailRef": "HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#51#TA#9#DA#150925#1S#1100921#1T#1743#LS#1101078#LT#1911#PU#784#RT#1#CA#IC#ZE#1162#ZB#IC 1162 #PC#1#FR#1100921#FT#1743#TO#1101078#TT#1911#",
|
||||
"origin": {
|
||||
"name": "Breda",
|
||||
"lng": 4.78000020980835,
|
||||
"lat": 51.5955543518066,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400131",
|
||||
"uicCdCode": "118400131",
|
||||
"stationCode": "BD",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T18:23:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T18:23:00+0200",
|
||||
"plannedTrack": "7",
|
||||
"actualTrack": "7",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"destination": {
|
||||
"name": "Rotterdam Centraal",
|
||||
"lng": 4.46888875961304,
|
||||
"lat": 51.9249992370605,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400530",
|
||||
"uicCdCode": "118400530",
|
||||
"stationCode": "RTD",
|
||||
"type": "STATION",
|
||||
"plannedTimeZoneOffset": 120,
|
||||
"plannedDateTime": "2025-09-15T18:45:00+0200",
|
||||
"actualTimeZoneOffset": 120,
|
||||
"actualDateTime": "2025-09-15T18:45:00+0200",
|
||||
"plannedTrack": "13",
|
||||
"actualTrack": "13",
|
||||
"exitSide": "RIGHT",
|
||||
"checkinStatus": "NOTHING",
|
||||
"notes": []
|
||||
},
|
||||
"product": {
|
||||
"productType": "Product",
|
||||
"number": "1162",
|
||||
"categoryCode": "IC",
|
||||
"shortCategoryName": "IC",
|
||||
"longCategoryName": "Intercity",
|
||||
"operatorCode": "NS",
|
||||
"operatorName": "NS",
|
||||
"operatorAdministrativeCode": 100,
|
||||
"type": "TRAIN",
|
||||
"displayName": "NS Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-body"
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"notes": [
|
||||
[
|
||||
{
|
||||
"value": "NS Intercity",
|
||||
"shortValue": "NS Intercity",
|
||||
"accessibilityValue": "NS Intercity",
|
||||
"key": "PRODUCT_NAME",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "richting Den Haag Centraal",
|
||||
"shortValue": "richting Den Haag Centraal",
|
||||
"accessibilityValue": "richting Den Haag Centraal",
|
||||
"key": "PRODUCT_DIRECTION",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"value": "Geen tussenstops",
|
||||
"shortValue": "Geen tussenstops",
|
||||
"accessibilityValue": "Geen tussenstops",
|
||||
"key": "PRODUCT_INTERMEDIATE_STOPS",
|
||||
"noteType": "ATTRIBUTE",
|
||||
"isPresentationRequired": true,
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"transferMessages": [
|
||||
{
|
||||
"message": "Overstap op zelfde perron",
|
||||
"accessibilityMessage": "Overstap op zelfde perron",
|
||||
"type": "CROSS_PLATFORM",
|
||||
"messageNesProperties": {
|
||||
"color": "text-default",
|
||||
"type": "informative"
|
||||
}
|
||||
}
|
||||
],
|
||||
"stops": [
|
||||
{
|
||||
"uicCode": "8400131",
|
||||
"uicCdCode": "118400131",
|
||||
"name": "Breda",
|
||||
"lat": 51.5955543518066,
|
||||
"lng": 4.78000020980835,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 0,
|
||||
"plannedDepartureDateTime": "2025-09-15T18:23:00+0200",
|
||||
"plannedDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureDateTime": "2025-09-15T18:23:00+0200",
|
||||
"actualDepartureTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "7",
|
||||
"plannedDepartureTrack": "7",
|
||||
"plannedArrivalTrack": "7",
|
||||
"actualArrivalTrack": "7",
|
||||
"departureDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
},
|
||||
{
|
||||
"uicCode": "8400530",
|
||||
"uicCdCode": "118400530",
|
||||
"name": "Rotterdam Centraal",
|
||||
"lat": 51.9249992370605,
|
||||
"lng": 4.46888875961304,
|
||||
"countryCode": "NL",
|
||||
"notes": [],
|
||||
"routeIdx": 6,
|
||||
"plannedArrivalDateTime": "2025-09-15T18:45:00+0200",
|
||||
"plannedArrivalTimeZoneOffset": 120,
|
||||
"actualArrivalDateTime": "2025-09-15T18:45:00+0200",
|
||||
"actualArrivalTimeZoneOffset": 120,
|
||||
"actualDepartureTrack": "13",
|
||||
"plannedDepartureTrack": "13",
|
||||
"plannedArrivalTrack": "13",
|
||||
"actualArrivalTrack": "13",
|
||||
"arrivalDelayInSeconds": 0,
|
||||
"cancelled": false,
|
||||
"borderStop": false,
|
||||
"passing": false
|
||||
}
|
||||
],
|
||||
"crowdForecast": "LOW",
|
||||
"bicycleSpotCount": 16,
|
||||
"punctuality": 81.8,
|
||||
"shorterStock": false,
|
||||
"journeyDetail": [
|
||||
{
|
||||
"type": "TRAIN_XML",
|
||||
"link": {
|
||||
"uri": "/api/v2/journey?id=HARP_MM-2|#VN#1#ST#1757498654#PI#0#ZI#51#TA#9#DA#150925#1S#1100921#1T#1743#LS#1101078#LT#1911#PU#784#RT#1#CA#IC#ZE#1162#ZB#IC 1162 #PC#1#FR#1100921#FT#1743#TO#1101078#TT#1911#&train=1162&datetime=2025-09-15T18:23:00+02:00"
|
||||
}
|
||||
}
|
||||
],
|
||||
"reachable": true,
|
||||
"plannedDurationInMinutes": 22,
|
||||
"nesProperties": {
|
||||
"color": "text-info",
|
||||
"scope": "LEG_LINE",
|
||||
"styles": {
|
||||
"type": "LineStyles",
|
||||
"dashed": false
|
||||
}
|
||||
},
|
||||
"duration": {
|
||||
"value": "22 min.",
|
||||
"accessibilityValue": "22 minuten",
|
||||
"nesProperties": {
|
||||
"color": "text-body"
|
||||
}
|
||||
},
|
||||
"preSteps": [],
|
||||
"postSteps": [],
|
||||
"distanceInMeters": 44166
|
||||
}
|
||||
],
|
||||
"checksum": "fe950328_3",
|
||||
"crowdForecast": "MEDIUM",
|
||||
"punctuality": 58.3,
|
||||
"optimal": false,
|
||||
"fares": [],
|
||||
"fareLegs": [
|
||||
{
|
||||
"origin": {
|
||||
"name": "Amsterdam Centraal",
|
||||
"lng": 4.90027761459351,
|
||||
"lat": 52.3788871765137,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400058",
|
||||
"uicCdCode": "118400058",
|
||||
"stationCode": "ASD",
|
||||
"type": "STATION"
|
||||
},
|
||||
"destination": {
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lng": 5.29362,
|
||||
"lat": 51.69048,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"stationCode": "HT",
|
||||
"type": "STATION"
|
||||
},
|
||||
"operator": "NS",
|
||||
"productTypes": ["TRAIN"],
|
||||
"fares": [
|
||||
{
|
||||
"priceInCents": 1910,
|
||||
"priceInCentsExcludingSupplement": 1910,
|
||||
"supplementInCents": 0,
|
||||
"buyableTicketSupplementPriceInCents": 0,
|
||||
"product": "OVCHIPKAART_ENKELE_REIS",
|
||||
"travelClass": "SECOND_CLASS",
|
||||
"discountType": "NO_DISCOUNT"
|
||||
}
|
||||
],
|
||||
"travelDate": "2025-09-15"
|
||||
},
|
||||
{
|
||||
"origin": {
|
||||
"name": "'s-Hertogenbosch",
|
||||
"lng": 5.29362,
|
||||
"lat": 51.69048,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400319",
|
||||
"uicCdCode": "118400319",
|
||||
"stationCode": "HT",
|
||||
"type": "STATION"
|
||||
},
|
||||
"destination": {
|
||||
"name": "Rotterdam Centraal",
|
||||
"lng": 4.46888875961304,
|
||||
"lat": 51.9249992370605,
|
||||
"countryCode": "NL",
|
||||
"uicCode": "8400530",
|
||||
"uicCdCode": "118400530",
|
||||
"stationCode": "RTD",
|
||||
"type": "STATION"
|
||||
},
|
||||
"operator": "NS",
|
||||
"productTypes": ["TRAIN"],
|
||||
"fares": [
|
||||
{
|
||||
"priceInCents": 2010,
|
||||
"priceInCentsExcludingSupplement": 2010,
|
||||
"supplementInCents": 0,
|
||||
"buyableTicketSupplementPriceInCents": 0,
|
||||
"product": "OVCHIPKAART_ENKELE_REIS",
|
||||
"travelClass": "SECOND_CLASS",
|
||||
"discountType": "NO_DISCOUNT"
|
||||
}
|
||||
],
|
||||
"travelDate": "2025-09-15"
|
||||
}
|
||||
],
|
||||
"productFare": {
|
||||
"priceInCents": 3920,
|
||||
"priceInCentsExcludingSupplement": 3920,
|
||||
"buyableTicketPriceInCents": 3920,
|
||||
"buyableTicketPriceInCentsExcludingSupplement": 3920,
|
||||
"product": "OVCHIPKAART_ENKELE_REIS",
|
||||
"travelClass": "SECOND_CLASS",
|
||||
"discountType": "NO_DISCOUNT"
|
||||
},
|
||||
"fareOptions": {
|
||||
"isInternationalBookable": false,
|
||||
"isInternational": false,
|
||||
"isEticketBuyable": false,
|
||||
"isPossibleWithOvChipkaart": false,
|
||||
"isTotalPriceUnknown": false,
|
||||
"reasonEticketNotBuyable": {
|
||||
"reason": "VIA_STATION_REQUESTED",
|
||||
"description": "Je kunt voor deze reis geen kaartje kopen, omdat je je reis via een extra station hebt gepland. Uiteraard kun je voor deze reis betalen met het saldo op je OV-chipkaart."
|
||||
}
|
||||
},
|
||||
"nsiLink": {
|
||||
"url": "https://www.nsinternational.com/nl/treintickets-v3/#/search/ASD/RTD/20250915/1634/1845?stationType=domestic&cookieConsent=false",
|
||||
"showInternationalBanner": false
|
||||
},
|
||||
"type": "NS",
|
||||
"shareUrl": {
|
||||
"uri": "https://www.ns.nl/rpx?ctx=arnu%7CfromStation%3D8400058%7CrequestedFromStation%3D8400058%7CtoStation%3D8400530%7CrequestedToStation%3D8400530%7CviaStation%3D8400319%7CplannedFromTime%3D2025-09-15T16%3A34%3A00%2B02%3A00%7CplannedArrivalTime%3D2025-09-15T18%3A45%3A00%2B02%3A00%7CexcludeHighSpeedTrains%3Dfalse%7CsearchForAccessibleTrip%3Dfalse%7ClocalTrainsOnly%3Dfalse%7CdisabledTransportModalities%3DBUS%2CFERRY%2CTRAM%2CMETRO%7CtravelAssistance%3Dfalse%7CtripSummaryHash%3D1596512355"
|
||||
},
|
||||
"realtime": true,
|
||||
"registerJourney": {
|
||||
"url": "https://treinwijzer.ns.nl/idp/login?ctxRecon=arnu%7CfromStation%3D8400058%7CrequestedFromStation%3D8400058%7CtoStation%3D8400530%7CrequestedToStation%3D8400530%7CviaStation%3D8400319%7CplannedFromTime%3D2025-09-15T16%3A34%3A00%2B02%3A00%7CplannedArrivalTime%3D2025-09-15T18%3A45%3A00%2B02%3A00%7CexcludeHighSpeedTrains%3Dfalse%7CsearchForAccessibleTrip%3Dfalse%7ClocalTrainsOnly%3Dfalse%7CdisabledTransportModalities%3DBUS%2CFERRY%2CTRAM%2CMETRO%7CtravelAssistance%3Dfalse%7CtripSummaryHash%3D1596512355&originUicCode=8400058&destinationUicCode=8400530&dateTime=2025-09-15T16%3A28%3A00.051873%2B02%3A00&searchForArrival=false&viaUicCode=8400319&excludeHighSpeedTrains=false&localTrainsOnly=false&searchForAccessibleTrip=false&lang=nl&travelAssistance=false",
|
||||
"searchUrl": "https://treinwijzer.ns.nl/idp/login?search=true&originUicCode=8400058&destinationUicCode=8400530&dateTime=2025-09-15T16%3A28%3A00.051873%2B02%3A00&searchForArrival=false&viaUicCode=8400319&excludeHighSpeedTrains=false&localTrainsOnly=false&searchForAccessibleTrip=false&lang=nl&travelAssistance=false",
|
||||
"status": "REGISTRATION_POSSIBLE",
|
||||
"bicycleReservationRequired": false
|
||||
},
|
||||
"modalityListItems": [
|
||||
{
|
||||
"name": "Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-subtle",
|
||||
"styles": {
|
||||
"type": "TextStyles",
|
||||
"strikethrough": false,
|
||||
"bold": false
|
||||
}
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"actualTrack": "4",
|
||||
"accessibilityName": "Intercity"
|
||||
},
|
||||
{
|
||||
"name": "Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-subtle",
|
||||
"styles": {
|
||||
"type": "TextStyles",
|
||||
"strikethrough": false,
|
||||
"bold": false
|
||||
}
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"actualTrack": "7",
|
||||
"accessibilityName": "Intercity"
|
||||
},
|
||||
{
|
||||
"name": "Intercity",
|
||||
"nameNesProperties": {
|
||||
"color": "text-subtle",
|
||||
"styles": {
|
||||
"type": "TextStyles",
|
||||
"strikethrough": false,
|
||||
"bold": false
|
||||
}
|
||||
},
|
||||
"iconNesProperties": {
|
||||
"color": "text-body",
|
||||
"icon": "train"
|
||||
},
|
||||
"actualTrack": "7",
|
||||
"accessibilityName": "Intercity"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"scrollRequestBackwardContext": "3|OB|MTµ14µ13954µ13944µ14077µ14080µ0µ0µ485µ13938µ1µ0µ8µ0µ0µ-2147483648µ1µ2|PDHµ28839c70675e70c3d48018993723bca2|RDµ15092025|RTµ161800|USµ0|RSµINIT",
|
||||
"scrollRequestForwardContext": "3|OF|MTµ14µ13985µ13985µ14107µ14115µ0µ0µ485µ13955µ8µ0µ8µ0µ0µ-2147483648µ1µ2|PDHµ28839c70675e70c3d48018993723bca2|RDµ15092025|RTµ161800|USµ0|RSµINIT"
|
||||
}
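For reference, the planned and actual timestamps in the fixture above are the inputs a delay flag such as the is_departure_delayed binary sensor below can be derived from; the helper here is illustrative only, not the integration's code.

# Illustration only: compare planned and actual times from the fixture payload.
from datetime import datetime

_FMT = "%Y-%m-%dT%H:%M:%S%z"


def is_delayed(planned: str, actual: str) -> bool:
    """Return True when the actual time is later than the planned time."""
    return datetime.strptime(actual, _FMT) > datetime.strptime(planned, _FMT)


# With the first leg of the fixture: planned 16:34, actual 16:35 -> delayed.
assert is_delayed("2025-09-15T16:34:00+0200", "2025-09-15T16:35:00+0200")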
@@ -0,0 +1,295 @@
|
||||
# serializer version: 1
|
||||
# name: test_binary_sensor[binary_sensor.to_home_arrival_delayed-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'binary_sensor',
|
||||
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
|
||||
'entity_id': 'binary_sensor.to_home_arrival_delayed',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Arrival delayed',
|
||||
'platform': 'nederlandse_spoorwegen',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'is_arrival_delayed',
|
||||
'unique_id': '01K721DZPMEN39R5DK0ATBMSY9-is_arrival_delayed',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensor[binary_sensor.to_home_arrival_delayed-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'attribution': 'Data provided by NS',
|
||||
'friendly_name': 'To home Arrival delayed',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'binary_sensor.to_home_arrival_delayed',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'off',
|
||||
})
|
||||
# ---
|
||||
# name: test_binary_sensor[binary_sensor.to_home_departure_delayed-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'binary_sensor.to_home_departure_delayed',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Departure delayed',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'is_departure_delayed',
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY9-is_departure_delayed',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor[binary_sensor.to_home_departure_delayed-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'friendly_name': 'To home Departure delayed',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.to_home_departure_delayed',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensor[binary_sensor.to_home_going-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'binary_sensor.to_home_going',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Going',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'is_going',
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY9-is_going',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor[binary_sensor.to_home_going-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'friendly_name': 'To home Going',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.to_home_going',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_arrival_delayed-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'binary_sensor.to_work_arrival_delayed',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Arrival delayed',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'is_arrival_delayed',
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY8-is_arrival_delayed',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_arrival_delayed-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'friendly_name': 'To work Arrival delayed',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.to_work_arrival_delayed',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'off',
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_departure_delayed-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'binary_sensor.to_work_departure_delayed',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Departure delayed',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'is_departure_delayed',
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY8-is_departure_delayed',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_departure_delayed-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'friendly_name': 'To work Departure delayed',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.to_work_departure_delayed',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_going-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'binary_sensor',
    'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
    'entity_id': 'binary_sensor.to_work_going',
    'has_entity_name': True,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': None,
    'original_icon': None,
    'original_name': 'Going',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': 'is_going',
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY8-is_going',
    'unit_of_measurement': None,
  })
# ---
# name: test_binary_sensor[binary_sensor.to_work_going-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'friendly_name': 'To work Going',
    }),
    'context': <ANY>,
    'entity_id': 'binary_sensor.to_work_going',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'on',
  })
# ---
@@ -1,4 +1,106 @@
# serializer version: 1
# name: test_no_trips_sensor[sensor.to_home-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.to_home',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
    'original_icon': 'mdi:train',
    'original_name': 'To home',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY9-actual_departure',
    'unit_of_measurement': None,
  })
# ---
# name: test_no_trips_sensor[sensor.to_home-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'device_class': 'timestamp',
      'friendly_name': 'To home',
      'icon': 'mdi:train',
    }),
    'context': <ANY>,
    'entity_id': 'sensor.to_home',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'unknown',
  })
# ---
# name: test_no_trips_sensor[sensor.to_work-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.to_work',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
    'original_icon': 'mdi:train',
    'original_name': 'To work',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY8-actual_departure',
    'unit_of_measurement': None,
  })
# ---
# name: test_no_trips_sensor[sensor.to_work-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'attribution': 'Data provided by NS',
      'device_class': 'timestamp',
      'friendly_name': 'To work',
      'icon': 'mdi:train',
    }),
    'context': <ANY>,
    'entity_id': 'sensor.to_work',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': 'unknown',
  })
# ---
# name: test_sensor[sensor.to_home-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
@@ -143,3 +245,147 @@
    'state': '2025-09-15T14:35:00+00:00',
  })
# ---
# name: test_single_trip_sensor[sensor.to_home-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.to_home',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
    'original_icon': 'mdi:train',
    'original_name': 'To home',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY9-actual_departure',
    'unit_of_measurement': None,
  })
# ---
# name: test_single_trip_sensor[sensor.to_home-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'arrival_delay': False,
      'arrival_platform_actual': '13',
      'arrival_platform_planned': '13',
      'arrival_time_actual': '18:45',
      'arrival_time_planned': '18:45',
      'attribution': 'Data provided by NS',
      'departure_delay': True,
      'departure_platform_actual': '4',
      'departure_platform_planned': '4',
      'departure_time_actual': '16:35',
      'departure_time_planned': '16:34',
      'device_class': 'timestamp',
      'friendly_name': 'To home',
      'going': True,
      'icon': 'mdi:train',
      'next': None,
      'remarks': None,
      'route': list([
        'Amsterdam Centraal',
        "'s-Hertogenbosch",
        'Breda',
        'Rotterdam Centraal',
      ]),
      'status': 'normal',
      'transfers': 2,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.to_home',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '2025-09-15T14:35:00+00:00',
  })
# ---
# name: test_single_trip_sensor[sensor.to_work-entry]
  EntityRegistryEntrySnapshot({
    'aliases': set({
    }),
    'area_id': None,
    'capabilities': None,
    'config_entry_id': <ANY>,
    'config_subentry_id': <ANY>,
    'device_class': None,
    'device_id': <ANY>,
    'disabled_by': None,
    'domain': 'sensor',
    'entity_category': None,
    'entity_id': 'sensor.to_work',
    'has_entity_name': False,
    'hidden_by': None,
    'icon': None,
    'id': <ANY>,
    'labels': set({
    }),
    'name': None,
    'options': dict({
    }),
    'original_device_class': <SensorDeviceClass.TIMESTAMP: 'timestamp'>,
    'original_icon': 'mdi:train',
    'original_name': 'To work',
    'platform': 'nederlandse_spoorwegen',
    'previous_unique_id': None,
    'suggested_object_id': None,
    'supported_features': 0,
    'translation_key': None,
    'unique_id': '01K721DZPMEN39R5DK0ATBMSY8-actual_departure',
    'unit_of_measurement': None,
  })
# ---
# name: test_single_trip_sensor[sensor.to_work-state]
  StateSnapshot({
    'attributes': ReadOnlyDict({
      'arrival_delay': False,
      'arrival_platform_actual': '13',
      'arrival_platform_planned': '13',
      'arrival_time_actual': '18:45',
      'arrival_time_planned': '18:45',
      'attribution': 'Data provided by NS',
      'departure_delay': True,
      'departure_platform_actual': '4',
      'departure_platform_planned': '4',
      'departure_time_actual': '16:35',
      'departure_time_planned': '16:34',
      'device_class': 'timestamp',
      'friendly_name': 'To work',
      'going': True,
      'icon': 'mdi:train',
      'next': None,
      'remarks': None,
      'route': list([
        'Amsterdam Centraal',
        "'s-Hertogenbosch",
        'Breda',
        'Rotterdam Centraal',
      ]),
      'status': 'normal',
      'transfers': 2,
    }),
    'context': <ANY>,
    'entity_id': 'sensor.to_work',
    'last_changed': <ANY>,
    'last_reported': <ANY>,
    'last_updated': <ANY>,
    'state': '2025-09-15T14:35:00+00:00',
  })
# ---
Some files were not shown because too many files have changed in this diff.