Compare commits

..

20 Commits

Author  SHA1  Message  Date

Daniel Hjelseth Høyer  d7aa939f83  Merge branch 'tibber_data' of github.com:home-assistant/core into tibber_data  2025-11-19 06:53:01 +01:00
Daniel Hjelseth Høyer  77b349d00f  test  2025-11-19 06:52:06 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  1c036128fa  Merge branch 'dev' into tibber_data  2025-11-18 08:40:09 +01:00
Daniel Hjelseth Høyer  16d898cc8e  test coverage  2025-11-18 07:12:48 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  a7225c7cd4  Merge branch 'dev' into tibber_data  2025-11-18 06:51:29 +01:00
Daniel Hjelseth Høyer  433a429c5a  test coverage  2025-11-18 06:37:33 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  c4770ed423  test coverage  2025-11-17 20:57:03 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  df329fd273  test coverage  2025-11-17 20:36:43 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  6eb40574bc  tests  2025-11-16 19:39:19 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  4fd1ef5483  Tibber data api  2025-11-16 17:49:18 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  7ec5d5305d  Tibber data api  2025-11-16 16:38:01 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  7f31d2538e  Tibber data api  2025-11-16 16:08:45 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  e1943307cf  Merge branch 'dev' into tibber_data  2025-11-16 16:08:21 +01:00
Daniel Hjelseth Høyer  a06529d187  Tibber data api  2025-11-16 15:59:18 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  21554af6a1  Tibber data api  2025-11-16 12:14:03 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  b4aae93c45  Tibber data api  2025-11-14 19:18:22 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  1f9c244c5c  Tibber data api  2025-11-14 06:01:05 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  9fa1b1b8df  Tibber data api  2025-11-13 22:11:18 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  f3ac3ecf05  Tibber data api  2025-11-13 21:07:27 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Daniel Hjelseth Høyer  9477b2206b  Tibber data api  2025-11-13 20:07:57 +01:00
    Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
285 changed files with 5353 additions and 18016 deletions

View File

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
@@ -94,7 +94,7 @@ jobs:
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -227,7 +227,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set build additional args
run: |
@@ -265,7 +265,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -309,7 +309,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
@@ -418,7 +418,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
@@ -463,7 +463,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0

View File

@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

View File

@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3
with:
category: "/language:python"

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python

CODEOWNERS generated
View File

@@ -627,8 +627,6 @@ build.json @home-assistant/supervisor
/tests/components/guardian/ @bachya
/homeassistant/components/habitica/ @tr4nt0r
/tests/components/habitica/ @tr4nt0r
/homeassistant/components/hanna/ @bestycame
/tests/components/hanna/ @bestycame
/homeassistant/components/hardkernel/ @home-assistant/core
/tests/components/hardkernel/ @home-assistant/core
/homeassistant/components/hardware/ @home-assistant/core
@@ -848,8 +846,6 @@ build.json @home-assistant/supervisor
/tests/components/kraken/ @eifinger
/homeassistant/components/kulersky/ @emlove
/tests/components/kulersky/ @emlove
/homeassistant/components/labs/ @home-assistant/core
/tests/components/labs/ @home-assistant/core
/homeassistant/components/lacrosse_view/ @IceBotYT
/tests/components/lacrosse_view/ @IceBotYT
/homeassistant/components/lamarzocco/ @zweckj
@@ -1740,8 +1736,6 @@ build.json @home-assistant/supervisor
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_remote_monitoring/ @AndyTempel
/tests/components/victron_remote_monitoring/ @AndyTempel
/homeassistant/components/vilfo/ @ManneW

Dockerfile generated
View File

@@ -25,7 +25,7 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version

View File

@@ -176,8 +176,6 @@ FRONTEND_INTEGRATIONS = {
STAGE_0_INTEGRATIONS = (
# Load logging and http deps as soon as possible
("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS, None),
# Setup labs for preview features
("labs", {"labs"}, STAGE_0_SUBSTAGE_TIMEOUT),
# Setup frontend
("frontend", FRONTEND_INTEGRATIONS, None),
# Setup recorder
@@ -214,7 +212,6 @@ DEFAULT_INTEGRATIONS = {
"backup",
"frontend",
"hardware",
"labs",
"logger",
"network",
"system_health",

View File

@@ -1,5 +0,0 @@
{
"domain": "victron",
"name": "Victron",
"integrations": ["victron_ble", "victron_remote_monitoring"]
}

View File

@@ -6,8 +6,9 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import Event, HassJob, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
@@ -19,7 +20,7 @@ from .analytics import (
EntityAnalyticsModifications,
async_devices_payload,
)
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, PREFERENCE_SCHEMA
from .const import ATTR_ONBOARDED, ATTR_PREFERENCES, DOMAIN, INTERVAL, PREFERENCE_SCHEMA
from .http import AnalyticsDevicesView
__all__ = [
@@ -42,9 +43,28 @@ async def async_setup(hass: HomeAssistant, _: ConfigType) -> bool:
# Load stored data
await analytics.load()
async def start_schedule(_event: Event) -> None:
@callback
def start_schedule(_event: Event) -> None:
"""Start the send schedule after the started event."""
await analytics.async_schedule()
# Wait 15 min after started
async_call_later(
hass,
900,
HassJob(
analytics.send_analytics,
name="analytics schedule",
cancel_on_shutdown=True,
),
)
# Send every day
async_track_time_interval(
hass,
analytics.send_analytics,
INTERVAL,
name="analytics daily",
cancel_on_shutdown=True,
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, start_schedule)
@@ -91,7 +111,7 @@ async def websocket_analytics_preferences(
analytics = hass.data[DATA_COMPONENT]
await analytics.save_preferences(preferences)
await analytics.async_schedule()
await analytics.send_analytics()
connection.send_result(
msg["id"],

View File

@@ -7,8 +7,6 @@ from asyncio import timeout
from collections.abc import Awaitable, Callable, Iterable, Mapping
from dataclasses import asdict as dataclass_asdict, dataclass, field
from datetime import datetime
import random
import time
from typing import Any, Protocol
import uuid
@@ -33,18 +31,10 @@ from homeassistant.const import (
BASE_PLATFORMS,
__version__ as HA_VERSION,
)
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HomeAssistant,
ReleaseChannel,
callback,
get_release_channel,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later, async_track_time_interval
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.singleton import singleton
from homeassistant.helpers.storage import Store
@@ -61,7 +51,6 @@ from homeassistant.setup import async_get_loaded_integrations
from .const import (
ANALYTICS_ENDPOINT_URL,
ANALYTICS_ENDPOINT_URL_DEV,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
ATTR_ADDON_COUNT,
ATTR_ADDONS,
ATTR_ARCH,
@@ -82,7 +71,6 @@ from .const import (
ATTR_PROTECTED,
ATTR_RECORDER,
ATTR_SLUG,
ATTR_SNAPSHOTS,
ATTR_STATE_COUNT,
ATTR_STATISTICS,
ATTR_SUPERVISOR,
@@ -92,10 +80,8 @@ from .const import (
ATTR_UUID,
ATTR_VERSION,
DOMAIN,
INTERVAL,
LOGGER,
PREFERENCE_SCHEMA,
SNAPSHOT_VERSION,
STORAGE_KEY,
STORAGE_VERSION,
)
@@ -208,18 +194,13 @@ def gen_uuid() -> str:
return uuid.uuid4().hex
RELEASE_CHANNEL = get_release_channel()
@dataclass
class AnalyticsData:
"""Analytics data."""
onboarded: bool
preferences: dict[str, bool]
uuid: str | None = None
submission_identifier: str | None = None
snapshot_submission_time: float | None = None
uuid: str | None
@classmethod
def from_dict(cls, data: dict[str, Any]) -> AnalyticsData:
@@ -228,8 +209,6 @@ class AnalyticsData:
data["onboarded"],
data["preferences"],
data["uuid"],
data.get("submission_identifier"),
data.get("snapshot_submission_time"),
)
@@ -240,10 +219,8 @@ class Analytics:
"""Initialize the Analytics class."""
self.hass: HomeAssistant = hass
self.session = async_get_clientsession(hass)
self._data = AnalyticsData(False, {})
self._data = AnalyticsData(False, {}, None)
self._store = Store[dict[str, Any]](hass, STORAGE_VERSION, STORAGE_KEY)
self._basic_scheduled: CALLBACK_TYPE | None = None
self._snapshot_scheduled: CALLBACK_TYPE | None = None
@property
def preferences(self) -> dict:
@@ -251,7 +228,6 @@ class Analytics:
preferences = self._data.preferences
return {
ATTR_BASE: preferences.get(ATTR_BASE, False),
ATTR_SNAPSHOTS: preferences.get(ATTR_SNAPSHOTS, False),
ATTR_DIAGNOSTICS: preferences.get(ATTR_DIAGNOSTICS, False),
ATTR_USAGE: preferences.get(ATTR_USAGE, False),
ATTR_STATISTICS: preferences.get(ATTR_STATISTICS, False),
@@ -268,9 +244,9 @@ class Analytics:
return self._data.uuid
@property
def endpoint_basic(self) -> str:
def endpoint(self) -> str:
"""Return the endpoint that will receive the payload."""
if RELEASE_CHANNEL is ReleaseChannel.DEV:
if HA_VERSION.endswith("0.dev0"):
# dev installations will contact the dev analytics environment
return ANALYTICS_ENDPOINT_URL_DEV
return ANALYTICS_ENDPOINT_URL
@@ -301,17 +277,13 @@ class Analytics:
):
self._data.preferences[ATTR_DIAGNOSTICS] = False
async def _save(self) -> None:
"""Save data."""
await self._store.async_save(dataclass_asdict(self._data))
async def save_preferences(self, preferences: dict) -> None:
"""Save preferences."""
preferences = PREFERENCE_SCHEMA(preferences)
self._data.preferences.update(preferences)
self._data.onboarded = True
await self._save()
await self._store.async_save(dataclass_asdict(self._data))
if self.supervisor:
await hassio.async_update_diagnostics(
@@ -320,16 +292,17 @@ class Analytics:
async def send_analytics(self, _: datetime | None = None) -> None:
"""Send analytics."""
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
return
hass = self.hass
supervisor_info = None
operating_system_info: dict[str, Any] = {}
if not self.onboarded or not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Nothing to submit")
return
if self._data.uuid is None:
self._data.uuid = gen_uuid()
await self._save()
await self._store.async_save(dataclass_asdict(self._data))
if self.supervisor:
supervisor_info = hassio.get_supervisor_info(hass)
@@ -463,7 +436,7 @@ class Analytics:
try:
async with timeout(30):
response = await self.session.post(self.endpoint_basic, json=payload)
response = await self.session.post(self.endpoint, json=payload)
if response.status == 200:
LOGGER.info(
(
@@ -476,7 +449,7 @@ class Analytics:
LOGGER.warning(
"Sending analytics failed with statuscode %s from %s",
response.status,
self.endpoint_basic,
self.endpoint,
)
except TimeoutError:
LOGGER.error("Timeout sending analytics to %s", ANALYTICS_ENDPOINT_URL)
@@ -516,182 +489,6 @@ class Analytics:
if entry.source != SOURCE_IGNORE and entry.disabled_by is None
)
async def send_snapshot(self, _: datetime | None = None) -> None:
"""Send a snapshot."""
if not self.onboarded or not self.preferences.get(ATTR_SNAPSHOTS, False):
return
payload = await _async_snapshot_payload(self.hass)
headers = {
"Content-Type": "application/json",
"User-Agent": f"home-assistant/{HA_VERSION}",
}
if self._data.submission_identifier is not None:
headers["X-Device-Database-Submission-Identifier"] = (
self._data.submission_identifier
)
try:
async with timeout(30):
response = await self.session.post(
ANALYTICS_SNAPSHOT_ENDPOINT_URL, json=payload, headers=headers
)
if response.status == 200: # OK
response_data = await response.json()
new_identifier = response_data.get("submission_identifier")
if (
new_identifier is not None
and new_identifier != self._data.submission_identifier
):
self._data.submission_identifier = new_identifier
await self._save()
LOGGER.info(
"Submitted snapshot analytics to Home Assistant servers"
)
elif response.status == 400: # Bad Request
response_data = await response.json()
error_kind = response_data.get("kind", "unknown")
error_message = response_data.get("message", "Unknown error")
if error_kind == "invalid-submission-identifier":
# Clear the invalid identifier and retry on next cycle
LOGGER.warning(
"Invalid submission identifier to %s, clearing: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)
self._data.submission_identifier = None
await self._save()
else:
LOGGER.warning(
"Malformed snapshot analytics submission (%s) to %s: %s",
error_kind,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
error_message,
)
elif response.status == 503: # Service Unavailable
response_text = await response.text()
LOGGER.warning(
"Snapshot analytics service %s unavailable: %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
response_text,
)
else:
LOGGER.warning(
"Unexpected status code %s when submitting snapshot analytics to %s",
response.status,
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)
except TimeoutError:
LOGGER.error(
"Timeout sending snapshot analytics to %s",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
)
except aiohttp.ClientError as err:
LOGGER.error(
"Error sending snapshot analytics to %s: %r",
ANALYTICS_SNAPSHOT_ENDPOINT_URL,
err,
)
async def async_schedule(self) -> None:
"""Schedule analytics."""
if not self.onboarded:
LOGGER.debug("Analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
return
if not self.preferences.get(ATTR_BASE, False):
LOGGER.debug("Basic analytics not scheduled")
if self._basic_scheduled is not None:
self._basic_scheduled()
self._basic_scheduled = None
elif self._basic_scheduled is None:
# Wait 15 min after started for basic analytics
self._basic_scheduled = async_call_later(
self.hass,
900,
HassJob(
self._async_schedule_basic,
name="basic analytics schedule",
cancel_on_shutdown=True,
),
)
if not self.preferences.get(ATTR_SNAPSHOTS, False) or RELEASE_CHANNEL not in (
ReleaseChannel.DEV,
ReleaseChannel.NIGHTLY,
):
LOGGER.debug("Snapshot analytics not scheduled")
if self._snapshot_scheduled:
self._snapshot_scheduled()
self._snapshot_scheduled = None
elif self._snapshot_scheduled is None:
snapshot_submission_time = self._data.snapshot_submission_time
if snapshot_submission_time is None:
# Randomize the submission time within the 24 hours
snapshot_submission_time = random.uniform(0, 86400)
self._data.snapshot_submission_time = snapshot_submission_time
await self._save()
LOGGER.debug(
"Initialized snapshot submission time to %s",
snapshot_submission_time,
)
# Calculate delay until next submission
current_time = time.time()
delay = (snapshot_submission_time - current_time) % 86400
self._snapshot_scheduled = async_call_later(
self.hass,
delay,
HassJob(
self._async_schedule_snapshots,
name="snapshot analytics schedule",
cancel_on_shutdown=True,
),
)
async def _async_schedule_basic(self, _: datetime | None = None) -> None:
"""Schedule basic analytics."""
await self.send_analytics()
# Send basic analytics every day
self._basic_scheduled = async_track_time_interval(
self.hass,
self.send_analytics,
INTERVAL,
name="basic analytics daily",
cancel_on_shutdown=True,
)
async def _async_schedule_snapshots(self, _: datetime | None = None) -> None:
"""Schedule snapshot analytics."""
await self.send_snapshot()
# Send snapshot analytics every day
self._snapshot_scheduled = async_track_time_interval(
self.hass,
self.send_snapshot,
INTERVAL,
name="snapshot analytics daily",
cancel_on_shutdown=True,
)
def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
"""Extract domains from the YAML configuration."""
@@ -708,8 +505,8 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def _async_snapshot_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices for a snapshot."""
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
@@ -914,13 +711,8 @@ async def _async_snapshot_payload(hass: HomeAssistant) -> dict: # noqa: C901
entities_info.append(entity_info)
return integrations_info
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return detailed information about entities and devices for a direct download."""
return {
"version": f"home-assistant:{SNAPSHOT_VERSION}",
"version": "home-assistant:1",
"home_assistant": HA_VERSION,
"integrations": await _async_snapshot_payload(hass),
"integrations": integrations_info,
}
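
Read the same way (new side of the hunk), the endpoint selection above ends up keyed off the version string rather than the release channel. A minimal sketch of that check, using the endpoint constants from the const.py hunk below:

from homeassistant.const import __version__ as HA_VERSION

ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"


def analytics_endpoint() -> str:
    """Return the endpoint that will receive the payload (sketch of the endpoint property)."""
    if HA_VERSION.endswith("0.dev0"):
        # dev installations contact the dev analytics environment
        return ANALYTICS_ENDPOINT_URL_DEV
    return ANALYTICS_ENDPOINT_URL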

View File

@@ -7,8 +7,6 @@ import voluptuous as vol
ANALYTICS_ENDPOINT_URL = "https://analytics-api.home-assistant.io/v1"
ANALYTICS_ENDPOINT_URL_DEV = "https://analytics-api-dev.home-assistant.io/v1"
SNAPSHOT_VERSION = "1"
ANALYTICS_SNAPSHOT_ENDPOINT_URL = f"https://device-database.eco-dev-aws.openhomefoundation.com/api/v1/snapshot/{SNAPSHOT_VERSION}"
DOMAIN = "analytics"
INTERVAL = timedelta(days=1)
STORAGE_KEY = "core.analytics"
@@ -40,7 +38,6 @@ ATTR_PREFERENCES = "preferences"
ATTR_PROTECTED = "protected"
ATTR_RECORDER = "recorder"
ATTR_SLUG = "slug"
ATTR_SNAPSHOTS = "snapshots"
ATTR_STATE_COUNT = "state_count"
ATTR_STATISTICS = "statistics"
ATTR_SUPERVISOR = "supervisor"
@@ -54,7 +51,6 @@ ATTR_VERSION = "version"
PREFERENCE_SCHEMA = vol.Schema(
{
vol.Optional(ATTR_BASE): bool,
vol.Optional(ATTR_SNAPSHOTS): bool,
vol.Optional(ATTR_DIAGNOSTICS): bool,
vol.Optional(ATTR_STATISTICS): bool,
vol.Optional(ATTR_USAGE): bool,
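
For context on how this schema is consumed: save_preferences() in analytics.py runs incoming preferences through it before storing them, so dropping the snapshots key means such payloads no longer validate. A small illustration; the ATTR_* string values are assumed to be the plain lowercase names.

import voluptuous as vol

ATTR_BASE = "base"  # assumed values of the ATTR_* constants
ATTR_DIAGNOSTICS = "diagnostics"
ATTR_STATISTICS = "statistics"
ATTR_USAGE = "usage"

PREFERENCE_SCHEMA = vol.Schema(
    {
        vol.Optional(ATTR_BASE): bool,
        vol.Optional(ATTR_DIAGNOSTICS): bool,
        vol.Optional(ATTR_STATISTICS): bool,
        vol.Optional(ATTR_USAGE): bool,
    }
)

PREFERENCE_SCHEMA({"base": True, "usage": False})  # validates fine
# PREFERENCE_SCHEMA({"snapshots": True})  -> raises vol.MultipleInvalid (extra keys not allowed)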

View File

@@ -7,26 +7,3 @@ CONNECTION_TIMEOUT: int = 10
# Field name of last self test retrieved from apcupsd.
LAST_S_TEST: Final = "laststest"
# Mapping of deprecated sensor keys (as reported by apcupsd, lower-cased) to their deprecation
# repair issue translation keys.
DEPRECATED_SENSORS: Final = {
"apc": "apc_deprecated",
"end apc": "date_deprecated",
"date": "date_deprecated",
"apcmodel": "available_via_device_info",
"model": "available_via_device_info",
"firmware": "available_via_device_info",
"version": "available_via_device_info",
"upsname": "available_via_device_info",
"serialno": "available_via_device_info",
}
AVAILABLE_VIA_DEVICE_ATTR: Final = {
"apcmodel": "model",
"model": "model",
"firmware": "hw_version",
"version": "sw_version",
"upsname": "name",
"serialno": "serial_number",
}

View File

@@ -4,8 +4,6 @@ from __future__ import annotations
import logging
from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -24,11 +22,9 @@ from homeassistant.const import (
UnitOfTime,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.issue_registry as ir
from .const import AVAILABLE_VIA_DEVICE_ATTR, DEPRECATED_SENSORS, DOMAIN, LAST_S_TEST
from .const import LAST_S_TEST
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
from .entity import APCUPSdEntity
@@ -532,62 +528,3 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
if not self.native_unit_of_measurement:
self._attr_native_unit_of_measurement = inferred_unit
async def async_added_to_hass(self) -> None:
"""Handle when entity is added to Home Assistant.
If this is a deprecated sensor entity, create a repair issue to guide
the user to disable it.
"""
await super().async_added_to_hass()
if not self.enabled:
return
reason = DEPRECATED_SENSORS.get(self.entity_description.key)
if not reason:
return
automations = automations_with_entity(self.hass, self.entity_id)
scripts = scripts_with_entity(self.hass, self.entity_id)
if not automations and not scripts:
return
entity_registry = er.async_get(self.hass)
items = [
f"- [{entry.name or entry.original_name or entity_id}]"
f"(/config/{integration}/edit/{entry.unique_id or entity_id.split('.', 1)[-1]})"
for integration, entities in (
("automation", automations),
("script", scripts),
)
for entity_id in entities
if (entry := entity_registry.async_get(entity_id))
]
placeholders = {
"entity_name": str(self.name or self.entity_id),
"entity_id": self.entity_id,
"items": "\n".join(items),
}
if via_attr := AVAILABLE_VIA_DEVICE_ATTR.get(self.entity_description.key):
placeholders["available_via_device_attr"] = via_attr
if device_entry := self.device_entry:
placeholders["device_id"] = device_entry.id
ir.async_create_issue(
self.hass,
DOMAIN,
f"{reason}_{self.entity_id}",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key=reason,
translation_placeholders=placeholders,
)
async def async_will_remove_from_hass(self) -> None:
"""Handle when entity will be removed from Home Assistant."""
await super().async_will_remove_from_hass()
if issue_key := DEPRECATED_SENSORS.get(self.entity_description.key):
ir.async_delete_issue(self.hass, DOMAIN, f"{issue_key}_{self.entity_id}")

View File

@@ -241,19 +241,5 @@
"cannot_connect": {
"message": "Cannot connect to APC UPS Daemon."
}
},
"issues": {
"apc_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because it exposes internal details of the APC UPS Daemon response.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use supported APC UPS entities instead. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"available_via_device_info": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the same value is available from the device registry via `device_attr(\"{device_id}\", \"{available_via_device_attr}\")`.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to use the `device_attr` helper instead of this sensor. Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
},
"date_deprecated": {
"description": "The {entity_name} sensor (`{entity_id}`) is deprecated because the timestamp is already available from other APC UPS sensors via their last updated time.\n\nIt is still referenced in the following automations or scripts:\n{items}\n\nUpdate those automations or scripts to reference any entity's `last_updated` attribute instead (for example, `states.binary_sensor.apcups_online_status.last_updated`). Reload the APC UPS Daemon integration afterwards to resolve this issue.",
"title": "{entity_name} sensor is deprecated"
}
}
}

View File

@@ -24,7 +24,7 @@ class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model_id=coordinator.brother.model,
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)

View File

@@ -17,7 +17,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -345,10 +345,12 @@ class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
"""Initialize."""
super().__init__(coordinator)
self._attr_native_value = description.value(coordinator.data)
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
self.entity_description = description
@property
def native_value(self) -> StateType | datetime:
"""Return the native value of the sensor."""
return self.entity_description.value(self.coordinator.data)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._attr_native_value = self.entity_description.value(self.coordinator.data)
self.async_write_ha_state()
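
With the markers stripped, the old native_value property and the new coordinator-callback variant above read as one block. A stripped-down, self-contained sketch of the callback variant follows; it is an illustration only, not the actual Brother class, and it assumes the entity description carries a value callable as in the hunk.

from homeassistant.components.sensor import SensorEntity
from homeassistant.core import callback
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class CoordinatorBackedSensor(CoordinatorEntity, SensorEntity):
    """Minimal coordinator-backed sensor using the cached-attribute pattern."""

    def __init__(self, coordinator, description) -> None:
        """Initialize and seed the cached value from the current coordinator data."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_native_value = description.value(coordinator.data)
        self._attr_unique_id = f"sketch_{description.key}"  # placeholder unique ID

    @callback
    def _handle_coordinator_update(self) -> None:
        """Recompute the cached value whenever the coordinator pushes new data."""
        self._attr_native_value = self.entity_description.value(self.coordinator.data)
        self.async_write_ha_state()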

View File

@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, websocket_create_area)
websocket_api.async_register_command(hass, websocket_delete_area)
websocket_api.async_register_command(hass, websocket_update_area)
websocket_api.async_register_command(hass, websocket_reorder_areas)
return True
@@ -146,27 +145,3 @@ def websocket_update_area(
connection.send_error(msg["id"], "invalid_info", str(err))
else:
connection.send_result(msg["id"], entry.json_fragment)
@websocket_api.websocket_command(
{
vol.Required("type"): "config/area_registry/reorder",
vol.Required("area_ids"): [str],
}
)
@websocket_api.require_admin
@callback
def websocket_reorder_areas(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Handle reorder areas websocket command."""
registry = ar.async_get(hass)
try:
registry.async_reorder(msg["area_ids"])
except ValueError as err:
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
else:
connection.send_result(msg["id"])
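
For reference, a client message matching the schema of the command removed above would look like the dict below (the area IDs are invented examples); the config/floor_registry/reorder command removed in the next file takes the same shape with floor_ids.

# Hypothetical WebSocket message for the removed reorder command
# (fields follow the schema in the hunk above; IDs are made-up examples).
reorder_message = {
    "id": 42,
    "type": "config/area_registry/reorder",
    "area_ids": ["kitchen", "living_room", "bedroom"],
}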

View File

@@ -18,7 +18,6 @@ def async_setup(hass: HomeAssistant) -> bool:
websocket_api.async_register_command(hass, websocket_create_floor)
websocket_api.async_register_command(hass, websocket_delete_floor)
websocket_api.async_register_command(hass, websocket_update_floor)
websocket_api.async_register_command(hass, websocket_reorder_floors)
return True
@@ -128,28 +127,6 @@ def websocket_update_floor(
connection.send_result(msg["id"], _entry_dict(entry))
@websocket_api.websocket_command(
{
vol.Required("type"): "config/floor_registry/reorder",
vol.Required("floor_ids"): [str],
}
)
@websocket_api.require_admin
@callback
def websocket_reorder_floors(
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle reorder floors websocket command."""
registry = fr.async_get(hass)
try:
registry.async_reorder(msg["floor_ids"])
except ValueError as err:
connection.send_error(msg["id"], websocket_api.ERR_INVALID_FORMAT, str(err))
else:
connection.send_result(msg["id"])
@callback
def _entry_dict(entry: FloorEntry) -> dict[str, Any]:
"""Convert entry to API format."""

View File

@@ -1 +0,0 @@
"""Virtual integration: Cosori."""

View File

@@ -1,6 +0,0 @@
{
"domain": "cosori",
"name": "Cosori",
"integration_type": "virtual",
"supported_by": "vesync"
}

View File

@@ -9,7 +9,6 @@ from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util.ssl import get_default_context
from .const import (
CONF_AUTHORIZE_STRING,
@@ -32,13 +31,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool
expires_at=entry.data[CONF_EXPIRES_AT],
)
cync_auth = Auth(async_get_clientsession(hass), user=user_info)
ssl_context = get_default_context()
try:
cync = await Cync.create(
auth=cync_auth,
ssl_context=ssl_context,
)
cync = await Cync.create(cync_auth)
except AuthFailedError as ex:
raise ConfigEntryAuthFailed("User token invalid") from ex
except CyncError as ex:

View File

@@ -11,14 +11,11 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import singleton
from homeassistant.helpers.storage import Store
from homeassistant.util.hass_dict import HassKey
DATA_STORAGE: HassKey[dict[str, UserStore]] = HassKey("frontend_storage")
DATA_SYSTEM_STORAGE: HassKey[SystemStore] = HassKey("frontend_system_storage")
STORAGE_VERSION_USER_DATA = 1
STORAGE_VERSION_SYSTEM_DATA = 1
async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
@@ -26,9 +23,6 @@ async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_set_user_data)
websocket_api.async_register_command(hass, websocket_get_user_data)
websocket_api.async_register_command(hass, websocket_subscribe_user_data)
websocket_api.async_register_command(hass, websocket_set_system_data)
websocket_api.async_register_command(hass, websocket_get_system_data)
websocket_api.async_register_command(hass, websocket_subscribe_system_data)
async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
@@ -89,52 +83,6 @@ class _UserStore(Store[dict[str, Any]]):
)
@singleton.singleton(DATA_SYSTEM_STORAGE, async_=True)
async def async_system_store(hass: HomeAssistant) -> SystemStore:
"""Access the system store."""
store = SystemStore(hass)
await store.async_load()
return store
class SystemStore:
"""System store for frontend data."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the system store."""
self._store: Store[dict[str, Any]] = Store(
hass,
STORAGE_VERSION_SYSTEM_DATA,
"frontend.system_data",
)
self.data: dict[str, Any] = {}
self.subscriptions: dict[str, list[Callable[[], None]]] = {}
async def async_load(self) -> None:
"""Load the data from the store."""
self.data = await self._store.async_load() or {}
async def async_set_item(self, key: str, value: Any) -> None:
"""Set an item and save the store."""
self.data[key] = value
self._store.async_delay_save(lambda: self.data, 1.0)
for cb in self.subscriptions.get(key, []):
cb()
@callback
def async_subscribe(
self, key: str, on_update_callback: Callable[[], None]
) -> Callable[[], None]:
"""Subscribe to store updates."""
self.subscriptions.setdefault(key, []).append(on_update_callback)
def unsubscribe() -> None:
"""Unsubscribe from the store."""
self.subscriptions[key].remove(on_update_callback)
return unsubscribe
def with_user_store(
orig_func: Callable[
[HomeAssistant, ActiveConnection, dict[str, Any], UserStore],
@@ -159,28 +107,6 @@ def with_user_store(
return with_user_store_func
def with_system_store(
orig_func: Callable[
[HomeAssistant, ActiveConnection, dict[str, Any], SystemStore],
Coroutine[Any, Any, None],
],
) -> Callable[
[HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None]
]:
"""Decorate function to provide system store."""
@wraps(orig_func)
async def with_system_store_func(
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
"""Provide system store to function."""
store = await async_system_store(hass)
await orig_func(hass, connection, msg, store)
return with_system_store_func
@websocket_api.websocket_command(
{
vol.Required("type"): "frontend/set_user_data",
@@ -243,65 +169,3 @@ async def websocket_subscribe_user_data(
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
on_data_update()
connection.send_result(msg["id"])
@websocket_api.websocket_command(
{
vol.Required("type"): "frontend/set_system_data",
vol.Required("key"): str,
vol.Required("value"): vol.Any(bool, str, int, float, dict, list, None),
}
)
@websocket_api.require_admin
@websocket_api.async_response
@with_system_store
async def websocket_set_system_data(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
store: SystemStore,
) -> None:
"""Handle set system data command."""
await store.async_set_item(msg["key"], msg["value"])
connection.send_result(msg["id"])
@websocket_api.websocket_command(
{vol.Required("type"): "frontend/get_system_data", vol.Required("key"): str}
)
@websocket_api.async_response
@with_system_store
async def websocket_get_system_data(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
store: SystemStore,
) -> None:
"""Handle get system data command."""
connection.send_result(msg["id"], {"value": store.data.get(msg["key"])})
@websocket_api.websocket_command(
{
vol.Required("type"): "frontend/subscribe_system_data",
vol.Required("key"): str,
}
)
@websocket_api.async_response
@with_system_store
async def websocket_subscribe_system_data(
hass: HomeAssistant,
connection: ActiveConnection,
msg: dict[str, Any],
store: SystemStore,
) -> None:
"""Handle subscribe to system data command."""
key: str = msg["key"]
def on_data_update() -> None:
"""Handle system data update."""
connection.send_event(msg["id"], {"value": store.data.get(key)})
connection.subscriptions[msg["id"]] = store.async_subscribe(key, on_data_update)
on_data_update()
connection.send_result(msg["id"])

View File

@@ -60,6 +60,35 @@ from .server import Server
_LOGGER = logging.getLogger(__name__)
_FFMPEG = "ffmpeg"
_SUPPORTED_STREAMS = frozenset(
(
"bubble",
"dvrip",
"expr",
_FFMPEG,
"gopro",
"homekit",
"http",
"https",
"httpx",
"isapi",
"ivideon",
"kasa",
"nest",
"onvif",
"roborock",
"rtmp",
"rtmps",
"rtmpx",
"rtsp",
"rtsps",
"rtspx",
"tapo",
"tcp",
"webrtc",
"webtorrent",
)
)
CONFIG_SCHEMA = vol.Schema(
{
@@ -168,7 +197,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bo
return False
provider = entry.runtime_data = WebRTCProvider(hass, url, session, client)
await provider.initialize()
entry.async_on_unload(async_register_webrtc_provider(hass, provider))
return True
@@ -200,21 +228,16 @@ class WebRTCProvider(CameraWebRTCProvider):
self._session = session
self._rest_client = rest_client
self._sessions: dict[str, Go2RtcWsClient] = {}
self._supported_schemes: set[str] = set()
@property
def domain(self) -> str:
"""Return the integration domain of the provider."""
return DOMAIN
async def initialize(self) -> None:
"""Initialize the provider."""
self._supported_schemes = await self._rest_client.schemes.list()
@callback
def async_is_supported(self, stream_source: str) -> bool:
"""Return if this provider is supports the Camera as source."""
return stream_source.partition(":")[0] in self._supported_schemes
return stream_source.partition(":")[0] in _SUPPORTED_STREAMS
async def async_handle_async_webrtc_offer(
self,
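
With this hunk, scheme support comes from the hard-coded _SUPPORTED_STREAMS set rather than a runtime /api/schemes lookup. A small stand-alone illustration of the prefix check used in async_is_supported; the URLs are made-up examples and only a subset of the schemes is listed here.

# Stand-alone illustration of the scheme check in async_is_supported().
_SUPPORTED_STREAMS = frozenset({"ffmpeg", "http", "https", "rtsp", "rtsps"})  # subset of the tuple above


def is_supported(stream_source: str) -> bool:
    """Return True when the part before the first ':' is a supported scheme."""
    # "rtsp://example.local/stream".partition(":")[0] -> "rtsp"
    return stream_source.partition(":")[0] in _SUPPORTED_STREAMS


assert is_supported("rtsp://example.local/stream")
assert not is_supported("file:///tmp/video.mp4")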

View File

@@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
HA_MANAGED_API_PORT = 11984
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
RECOMMENDED_VERSION = "1.9.12"
RECOMMENDED_VERSION = "1.9.11"

View File

@@ -8,6 +8,6 @@
"integration_type": "system",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["go2rtc-client==0.3.0"],
"requirements": ["go2rtc-client==0.2.1"],
"single_config_entry": true
}

View File

@@ -29,18 +29,8 @@ _RESPAWN_COOLDOWN = 1
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually
app:
modules: {app_modules}
api:
listen: "{api_ip}:{api_port}"
allow_paths: {api_allow_paths}
# ffmpeg needs the exec module
# Restrict execution to only ffmpeg binary
exec:
allow_paths:
- ffmpeg
rtsp:
listen: "127.0.0.1:18554"
@@ -50,43 +40,6 @@ webrtc:
ice_servers: []
"""
_APP_MODULES = (
"api",
"exec", # Execution module for ffmpeg
"ffmpeg",
"http",
"mjpeg",
"onvif",
"rtmp",
"rtsp",
"srtp",
"webrtc",
"ws",
)
_API_ALLOW_PATHS = (
"/", # UI static page and version control
"/api", # Main API path
"/api/frame.jpeg", # Snapshot functionality
"/api/schemes", # Supported stream schemes
"/api/streams", # Stream management
"/api/webrtc", # Webrtc functionality
"/api/ws", # Websocket functionality (e.g. webrtc candidates)
)
# Additional modules when UI is enabled
_UI_APP_MODULES = (
*_APP_MODULES,
"debug",
)
# Additional api paths when UI is enabled
_UI_API_ALLOW_PATHS = (
*_API_ALLOW_PATHS,
"/api/config", # UI config view
"/api/log", # UI log view
"/api/streams.dot", # UI network view
)
_LOG_LEVEL_MAP = {
"TRC": logging.DEBUG,
"DBG": logging.DEBUG,
@@ -108,34 +61,14 @@ class Go2RTCWatchdogError(HomeAssistantError):
"""Raised on watchdog error."""
def _format_list_for_yaml(items: tuple[str, ...]) -> str:
"""Format a list of strings for yaml config."""
if not items:
return "[]"
formatted_items = ",".join(f'"{item}"' for item in items)
return f"[{formatted_items}]"
def _create_temp_file(enable_ui: bool) -> str:
def _create_temp_file(api_ip: str) -> str:
"""Create temporary config file."""
app_modules: tuple[str, ...] = _APP_MODULES
api_paths: tuple[str, ...] = _API_ALLOW_PATHS
api_ip = _LOCALHOST_IP
if enable_ui:
app_modules = _UI_APP_MODULES
api_paths = _UI_API_ALLOW_PATHS
# Listen on all interfaces for allowing access from all ips
api_ip = ""
# Set delete=False to prevent the file from being deleted when the file is closed
# Linux is clearing tmp folder on reboot, so no need to delete it manually
with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
file.write(
_GO2RTC_CONFIG_FORMAT.format(
api_ip=api_ip,
api_port=HA_MANAGED_API_PORT,
app_modules=_format_list_for_yaml(app_modules),
api_allow_paths=_format_list_for_yaml(api_paths),
api_ip=api_ip, api_port=HA_MANAGED_API_PORT
).encode()
)
return file.name
@@ -153,7 +86,10 @@ class Server:
self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
self._process: asyncio.subprocess.Process | None = None
self._startup_complete = asyncio.Event()
self._enable_ui = enable_ui
self._api_ip = _LOCALHOST_IP
if enable_ui:
# Listen on all interfaces for allowing access from all ips
self._api_ip = ""
self._watchdog_task: asyncio.Task | None = None
self._watchdog_tasks: list[asyncio.Task] = []
@@ -168,7 +104,7 @@ class Server:
"""Start the server."""
_LOGGER.debug("Starting go2rtc server")
config_file = await self._hass.async_add_executor_job(
_create_temp_file, self._enable_ui
_create_temp_file, self._api_ip
)
self._startup_complete.clear()
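
After this change the temporary config only interpolates the API listen address and port. Below is a rough sketch of the rendered file for the default (no-UI) case, with the template trimmed to the sections shown in the hunk, YAML indentation restored, _LOCALHOST_IP assumed to be 127.0.0.1, and HA_MANAGED_API_PORT taken as 11984 from the const.py hunk above.

# Sketch of the managed go2rtc config after this change (trimmed template).
_GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
# Do not edit it manually

api:
  listen: "{api_ip}:{api_port}"

rtsp:
  listen: "127.0.0.1:18554"

webrtc:
  ice_servers: []
"""

HA_MANAGED_API_PORT = 11984
_LOCALHOST_IP = "127.0.0.1"  # assumed value of the constant

print(_GO2RTC_CONFIG_FORMAT.format(api_ip=_LOCALHOST_IP, api_port=HA_MANAGED_API_PORT))
# -> api.listen renders as "127.0.0.1:11984" for the HA-managed server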

View File

@@ -1,54 +0,0 @@
"""The Hanna Instruments integration."""
from __future__ import annotations
from typing import Any
from hanna_cloud import HannaCloudClient
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from .coordinator import HannaConfigEntry, HannaDataCoordinator
PLATFORMS = [Platform.SENSOR]
def _authenticate_and_get_devices(
api_client: HannaCloudClient,
email: str,
password: str,
) -> list[dict[str, Any]]:
"""Authenticate and get devices in a single executor job."""
api_client.authenticate(email, password)
return api_client.get_devices()
async def async_setup_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
"""Set up Hanna Instruments from a config entry."""
api_client = HannaCloudClient()
devices = await hass.async_add_executor_job(
_authenticate_and_get_devices,
api_client,
entry.data[CONF_EMAIL],
entry.data[CONF_PASSWORD],
)
# Create device coordinators
device_coordinators = {}
for device in devices:
coordinator = HannaDataCoordinator(hass, entry, device, api_client)
await coordinator.async_config_entry_first_refresh()
device_coordinators[coordinator.device_identifier] = coordinator
# Set runtime data
entry.runtime_data = device_coordinators
# Forward the setup to the platforms
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: HannaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -1,62 +0,0 @@
"""Config flow for Hanna Instruments integration."""
from __future__ import annotations
import logging
from typing import Any
from hanna_cloud import AuthenticationError, HannaCloudClient
from requests.exceptions import ConnectionError as RequestsConnectionError, Timeout
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class HannaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Hanna Instruments."""
VERSION = 1
data_schema = vol.Schema(
{vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str}
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the setup flow."""
errors: dict[str, str] = {}
if user_input is not None:
await self.async_set_unique_id(user_input[CONF_EMAIL])
self._abort_if_unique_id_configured()
client = HannaCloudClient()
try:
await self.hass.async_add_executor_job(
client.authenticate,
user_input[CONF_EMAIL],
user_input[CONF_PASSWORD],
)
except (Timeout, RequestsConnectionError):
errors["base"] = "cannot_connect"
except AuthenticationError:
errors["base"] = "invalid_auth"
if not errors:
return self.async_create_entry(
title=user_input[CONF_EMAIL],
data=user_input,
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
self.data_schema, user_input
),
errors=errors,
)

View File

@@ -1,3 +0,0 @@
"""Constants for the Hanna integration."""
DOMAIN = "hanna"

View File

@@ -1,72 +0,0 @@
"""Hanna Instruments data coordinator for Home Assistant.
This module provides the data coordinator for fetching and managing Hanna Instruments
sensor data.
"""
from datetime import timedelta
import logging
from typing import Any
from hanna_cloud import HannaCloudClient
from requests.exceptions import RequestException
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
type HannaConfigEntry = ConfigEntry[dict[str, HannaDataCoordinator]]
_LOGGER = logging.getLogger(__name__)
class HannaDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Coordinator for fetching Hanna sensor data."""
def __init__(
self,
hass: HomeAssistant,
config_entry: HannaConfigEntry,
device: dict[str, Any],
api_client: HannaCloudClient,
) -> None:
"""Initialize the Hanna data coordinator."""
self.api_client = api_client
self.device_data = device
super().__init__(
hass,
_LOGGER,
name=f"{DOMAIN}_{self.device_identifier}",
config_entry=config_entry,
update_interval=timedelta(seconds=30),
)
@property
def device_identifier(self) -> str:
"""Return the device identifier."""
return self.device_data["DID"]
def get_parameters(self) -> list[dict[str, Any]]:
"""Get all parameters from the sensor data."""
return self.api_client.parameters
def get_parameter_value(self, key: str) -> Any:
"""Get the value for a specific parameter."""
for parameter in self.get_parameters():
if parameter["name"] == key:
return parameter["value"]
return None
async def _async_update_data(self) -> dict[str, Any]:
"""Fetch latest sensor data from the Hanna API."""
try:
readings = await self.hass.async_add_executor_job(
self.api_client.get_last_device_reading, self.device_identifier
)
except RequestException as e:
raise UpdateFailed(f"Error communicating with Hanna API: {e}") from e
except (KeyError, IndexError) as e:
raise UpdateFailed(f"Error parsing Hanna API response: {e}") from e
return readings

View File

@@ -1,28 +0,0 @@
"""Hanna Instruments entity base class for Home Assistant.
This module provides the base entity class for Hanna Instruments entities.
"""
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import HannaDataCoordinator
class HannaEntity(CoordinatorEntity[HannaDataCoordinator]):
"""Base class for Hanna entities."""
_attr_has_entity_name = True
def __init__(self, coordinator: HannaDataCoordinator) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.device_identifier)},
manufacturer=coordinator.device_data.get("manufacturer"),
model=coordinator.device_data.get("DM"),
name=coordinator.device_data.get("name"),
serial_number=coordinator.device_data.get("serial_number"),
sw_version=coordinator.device_data.get("sw_version"),
)

View File

@@ -1,10 +0,0 @@
{
"domain": "hanna",
"name": "Hanna",
"codeowners": ["@bestycame"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/hanna",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["hanna-cloud==0.0.6"]
}

View File

@@ -1,70 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration doesn't add actions.
appropriate-polling:
status: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
This integration does not have any configuration parameters.
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow: todo
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: done
entity-disabled-by-default: todo
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: todo
inject-websession: todo
strict-typing: todo

View File

@@ -1,106 +0,0 @@
"""Hanna Instruments sensor integration for Home Assistant.
This module provides sensor entities for various Hanna Instruments devices,
including pH, ORP, temperature, and chemical sensors. It uses the Hanna API
to fetch readings and updates them periodically.
"""
from __future__ import annotations
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfElectricPotential, UnitOfTemperature, UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .coordinator import HannaConfigEntry, HannaDataCoordinator
from .entity import HannaEntity
_LOGGER = logging.getLogger(__name__)
SENSOR_DESCRIPTIONS = [
SensorEntityDescription(
key="ph",
translation_key="ph_value",
device_class=SensorDeviceClass.PH,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="orp",
translation_key="chlorine_orp_value",
device_class=SensorDeviceClass.VOLTAGE,
native_unit_of_measurement=UnitOfElectricPotential.MILLIVOLT,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="temp",
translation_key="water_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="airTemp",
translation_key="air_temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="acidBase",
translation_key="ph_acid_base_flow_rate",
icon="mdi:chemical-weapon",
device_class=SensorDeviceClass.VOLUME,
native_unit_of_measurement=UnitOfVolume.MILLILITERS,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key="cl",
translation_key="chlorine_flow_rate",
icon="mdi:chemical-weapon",
device_class=SensorDeviceClass.VOLUME,
native_unit_of_measurement=UnitOfVolume.MILLILITERS,
state_class=SensorStateClass.MEASUREMENT,
),
]
async def async_setup_entry(
hass: HomeAssistant,
entry: HannaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hanna sensors from a config entry."""
device_coordinators = entry.runtime_data
async_add_entities(
HannaSensor(coordinator, description)
for description in SENSOR_DESCRIPTIONS
for coordinator in device_coordinators.values()
)
class HannaSensor(HannaEntity, SensorEntity):
"""Representation of a Hanna sensor."""
def __init__(
self,
coordinator: HannaDataCoordinator,
description: SensorEntityDescription,
) -> None:
"""Initialize a Hanna sensor."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.device_identifier}_{description.key}"
self.entity_description = description
@property
def native_value(self) -> StateType:
"""Return the value reported by the sensor."""
return self.coordinator.get_parameter_value(self.entity_description.key)

View File

@@ -1,44 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "Email address for your Hanna Cloud account",
"password": "Password for your Hanna Cloud account"
},
"description": "Enter your Hanna Cloud credentials"
}
}
},
"entity": {
"sensor": {
"air_temperature": {
"name": "Air temperature"
},
"chlorine_flow_rate": {
"name": "Chlorine flow rate"
},
"chlorine_orp_value": {
"name": "Chlorine ORP value"
},
"ph_acid_base_flow_rate": {
"name": "pH Acid/Base flow rate"
},
"water_temperature": {
"name": "Water temperature"
}
}
}
}

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.1.2",
"universal-silabs-flasher==0.1.0",
"ha-silabs-firmware-client==0.3.0"
]
}

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2.7.1"]
"requirements": ["aioautomower==2.7.0"]
}

View File

@@ -112,7 +112,6 @@ async def async_setup_entry(
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(hours=1),
config_entry=entry,
)
# Fetch initial data so we have data when entities subscribe

View File

@@ -11,11 +11,6 @@ from random import random
import voluptuous as vol
from homeassistant.components.labs import (
EVENT_LABS_UPDATED,
EventLabsUpdatedData,
async_is_preview_feature_enabled,
)
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
from homeassistant.components.recorder.models import (
StatisticData,
@@ -35,14 +30,10 @@ from homeassistant.const import (
UnitOfTemperature,
UnitOfVolume,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.issue_registry import (
IssueSeverity,
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import (
@@ -119,23 +110,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Notify backup listeners
hass.async_create_task(_notify_backup_listeners(hass), eager_start=False)
# Subscribe to labs feature updates for kitchen_sink preview repair
@callback
def _async_labs_updated(event: Event[EventLabsUpdatedData]) -> None:
"""Handle labs feature update event."""
if (
event.data["domain"] == "kitchen_sink"
and event.data["preview_feature"] == "special_repair"
):
_async_update_special_repair(hass)
entry.async_on_unload(
hass.bus.async_listen(EVENT_LABS_UPDATED, _async_labs_updated)
)
# Check if lab feature is currently enabled and create repair if so
_async_update_special_repair(hass)
return True
@@ -163,27 +137,6 @@ async def async_remove_config_entry_device(
return True
@callback
def _async_update_special_repair(hass: HomeAssistant) -> None:
"""Create or delete the special repair issue.
Creates a repair issue when the special_repair lab feature is enabled,
and deletes it when disabled. This demonstrates how lab features can interact
with Home Assistant's repair system.
"""
if async_is_preview_feature_enabled(hass, DOMAIN, "special_repair"):
async_create_issue(
hass,
DOMAIN,
"kitchen_sink_special_repair_issue",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="special_repair",
)
else:
async_delete_issue(hass, DOMAIN, "kitchen_sink_special_repair_issue")
async def _notify_backup_listeners(hass: HomeAssistant) -> None:
for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
listener()

View File

@@ -5,13 +5,6 @@
"codeowners": ["@home-assistant/core"],
"documentation": "https://www.home-assistant.io/integrations/kitchen_sink",
"iot_class": "calculated",
"preview_features": {
"special_repair": {
"feedback_url": "https://community.home-assistant.io",
"learn_more_url": "https://www.home-assistant.io/integrations/kitchen_sink",
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/kitchen_sink&integration_name=Kitchen%20Sink"
}
},
"quality_scale": "internal",
"single_config_entry": true
}

View File

@@ -71,10 +71,6 @@
},
"title": "The blinker fluid is empty and needs to be refilled"
},
"special_repair": {
"description": "This is a special repair created by a preview feature! This demonstrates how lab features can interact with the Home Assistant repair system. You can disable this by turning off the kitchen sink special repair feature in Settings > System > Labs.",
"title": "Special repair feature preview"
},
"transmogrifier_deprecated": {
"description": "The transmogrifier component is now deprecated due to the lack of local control available in the new API",
"title": "The transmogrifier component is deprecated"
@@ -107,14 +103,6 @@
}
}
},
"preview_features": {
"special_repair": {
"description": "Creates a **special repair issue** when enabled.\n\nThis demonstrates how lab features can interact with other Home Assistant integrations.",
"disable_confirmation": "This will remove the special repair issue. Don't worry, this is just a demonstration feature.",
"enable_confirmation": "This will create a special repair issue to demonstrate Labs preview features. This is just an example and won't affect your actual system.",
"name": "Special repair"
}
},
"services": {
"test_service_1": {
"description": "Fake action for testing",

View File

@@ -1,310 +0,0 @@
"""The Home Assistant Labs integration.
This integration provides preview features that can be toggled on/off by users.
Integrations can register lab preview features in their manifest.json which will appear
in the Home Assistant Labs UI for users to enable or disable.
"""
from __future__ import annotations
import logging
from typing import Any
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.backup import async_get_manager
from homeassistant.core import HomeAssistant, callback
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import async_get_custom_components
from .const import (
DOMAIN,
EVENT_LABS_UPDATED,
LABS_DATA,
STORAGE_KEY,
STORAGE_VERSION,
EventLabsUpdatedData,
LabPreviewFeature,
LabsData,
LabsStoreData,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
__all__ = [
"EVENT_LABS_UPDATED",
"EventLabsUpdatedData",
"async_is_preview_feature_enabled",
]
class LabsStorage(Store[LabsStoreData]):
"""Custom Store for Labs that converts between runtime and storage formats.
Runtime format: {"preview_feature_status": {(domain, preview_feature)}}
Storage format: {"preview_feature_status": [{"domain": str, "preview_feature": str}]}
Only enabled features are saved to storage - if stored, it's enabled.
"""
async def _async_load_data(self) -> LabsStoreData | None:
"""Load data and convert from storage format to runtime format."""
raw_data = await super()._async_load_data()
if raw_data is None:
return None
status_list = raw_data.get("preview_feature_status", [])
# Convert list of objects to runtime set - if stored, it's enabled
return {
"preview_feature_status": {
(item["domain"], item["preview_feature"]) for item in status_list
}
}
def _write_data(self, path: str, data: dict) -> None:
"""Convert from runtime format to storage format and write.
Only saves enabled features - disabled is the default.
"""
# Extract the actual data (has version/key wrapper)
actual_data = data.get("data", data)
# Check if this is Labs data (has preview_feature_status key)
if "preview_feature_status" not in actual_data:
# Not Labs data, write as-is
super()._write_data(path, data)
return
preview_status = actual_data["preview_feature_status"]
# Convert from runtime format (set of tuples) to storage format (list of dicts)
status_list = [
{"domain": domain, "preview_feature": preview_feature}
for domain, preview_feature in preview_status
]
# Build the final data structure with converted format
data_copy = data.copy()
data_copy["data"] = {"preview_feature_status": status_list}
super()._write_data(path, data_copy)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Labs component."""
store = LabsStorage(hass, STORAGE_VERSION, STORAGE_KEY, private=True)
data = await store.async_load()
if data is None:
data = {"preview_feature_status": set()}
# Scan ALL integrations for lab preview features (loaded or not)
lab_preview_features = await _async_scan_all_preview_features(hass)
# Clean up preview features that no longer exist
if lab_preview_features:
valid_keys = {
(pf.domain, pf.preview_feature) for pf in lab_preview_features.values()
}
stale_keys = data["preview_feature_status"] - valid_keys
if stale_keys:
_LOGGER.debug(
"Removing %d stale preview features: %s",
len(stale_keys),
stale_keys,
)
data["preview_feature_status"] -= stale_keys
await store.async_save(data)
hass.data[LABS_DATA] = LabsData(
store=store,
data=data,
preview_features=lab_preview_features,
)
websocket_api.async_register_command(hass, websocket_list_preview_features)
websocket_api.async_register_command(hass, websocket_update_preview_feature)
return True
def _populate_preview_features(
preview_features: dict[str, LabPreviewFeature],
domain: str,
labs_preview_features: dict[str, dict[str, str]],
is_built_in: bool = True,
) -> None:
"""Populate preview features dictionary from integration preview_features.
Args:
preview_features: Dictionary to populate
domain: Integration domain
labs_preview_features: Dictionary of preview feature definitions from manifest
is_built_in: Whether this is a built-in integration
"""
for preview_feature_key, preview_feature_data in labs_preview_features.items():
preview_feature = LabPreviewFeature(
domain=domain,
preview_feature=preview_feature_key,
is_built_in=is_built_in,
feedback_url=preview_feature_data.get("feedback_url"),
learn_more_url=preview_feature_data.get("learn_more_url"),
report_issue_url=preview_feature_data.get("report_issue_url"),
)
preview_features[preview_feature.full_key] = preview_feature
async def _async_scan_all_preview_features(
hass: HomeAssistant,
) -> dict[str, LabPreviewFeature]:
"""Scan ALL available integrations for lab preview features (loaded or not)."""
preview_features: dict[str, LabPreviewFeature] = {}
# Load pre-generated built-in lab preview features (already includes all data)
for domain, domain_preview_features in LABS_PREVIEW_FEATURES.items():
_populate_preview_features(
preview_features, domain, domain_preview_features, is_built_in=True
)
# Scan custom components
custom_integrations = await async_get_custom_components(hass)
_LOGGER.debug(
"Loaded %d built-in + scanning %d custom integrations for lab preview features",
len(preview_features),
len(custom_integrations),
)
for integration in custom_integrations.values():
if labs_preview_features := integration.preview_features:
_populate_preview_features(
preview_features,
integration.domain,
labs_preview_features,
is_built_in=False,
)
_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
return preview_features
@callback
def async_is_preview_feature_enabled(
hass: HomeAssistant, domain: str, preview_feature: str
) -> bool:
"""Check if a lab preview feature is enabled.
Args:
hass: HomeAssistant instance
domain: Integration domain
preview_feature: Preview feature name
Returns:
True if the preview feature is enabled, False otherwise
"""
if LABS_DATA not in hass.data:
return False
labs_data = hass.data[LABS_DATA]
return (domain, preview_feature) in labs_data.data["preview_feature_status"]
@callback
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "labs/list"})
def websocket_list_preview_features(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List all lab preview features filtered by loaded integrations."""
labs_data = hass.data[LABS_DATA]
loaded_components = hass.config.components
preview_features: list[dict[str, Any]] = [
preview_feature.to_dict(
(preview_feature.domain, preview_feature.preview_feature)
in labs_data.data["preview_feature_status"]
)
for preview_feature_key, preview_feature in labs_data.preview_features.items()
if preview_feature.domain in loaded_components
]
connection.send_result(msg["id"], {"features": preview_features})
@websocket_api.require_admin
@websocket_api.websocket_command(
{
vol.Required("type"): "labs/update",
vol.Required("domain"): str,
vol.Required("preview_feature"): str,
vol.Required("enabled"): bool,
vol.Optional("create_backup", default=False): bool,
}
)
@websocket_api.async_response
async def websocket_update_preview_feature(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Update a lab preview feature state."""
domain = msg["domain"]
preview_feature = msg["preview_feature"]
enabled = msg["enabled"]
create_backup = msg["create_backup"]
labs_data = hass.data[LABS_DATA]
# Build preview_feature_id for lookup
preview_feature_id = f"{domain}.{preview_feature}"
# Validate preview feature exists
if preview_feature_id not in labs_data.preview_features:
connection.send_error(
msg["id"],
websocket_api.ERR_NOT_FOUND,
f"Preview feature {preview_feature_id} not found",
)
return
# Create backup if requested and enabling
if create_backup and enabled:
try:
backup_manager = async_get_manager(hass)
await backup_manager.async_create_automatic_backup()
except Exception as err: # noqa: BLE001 - websocket handlers can catch broad exceptions
connection.send_error(
msg["id"],
websocket_api.ERR_UNKNOWN_ERROR,
f"Error creating backup: {err}",
)
return
# Update storage (only store enabled features, remove if disabled)
if enabled:
labs_data.data["preview_feature_status"].add((domain, preview_feature))
else:
labs_data.data["preview_feature_status"].discard((domain, preview_feature))
# Save changes immediately
await labs_data.store.async_save(labs_data.data)
# Fire event
event_data: EventLabsUpdatedData = {
"domain": domain,
"preview_feature": preview_feature,
"enabled": enabled,
}
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
connection.send_result(msg["id"])
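The LabsStorage docstrings above describe two representations of the same data: a runtime set of (domain, preview_feature) tuples and a JSON-friendly storage list of dicts, where only enabled features are persisted. The following standalone sketch of that round-trip conversion is illustrative only; the helper names are not part of this changeset.
# Runtime data: set of (domain, preview_feature) tuples.
# Storage data: list of JSON-friendly dicts; presence means "enabled".

RuntimeData = dict[str, set[tuple[str, str]]]
StorageData = dict[str, list[dict[str, str]]]


def to_storage(data: RuntimeData) -> StorageData:
    """Runtime set of tuples -> storage list of dicts."""
    return {
        "preview_feature_status": [
            {"domain": domain, "preview_feature": feature}
            for domain, feature in sorted(data["preview_feature_status"])
        ]
    }


def from_storage(raw: StorageData) -> RuntimeData:
    """Storage list of dicts -> runtime set; if stored, it's enabled."""
    return {
        "preview_feature_status": {
            (item["domain"], item["preview_feature"])
            for item in raw.get("preview_feature_status", [])
        }
    }


runtime: RuntimeData = {"preview_feature_status": {("kitchen_sink", "special_repair")}}
assert from_storage(to_storage(runtime)) == runtime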

View File

@@ -1,77 +0,0 @@
"""Constants for the Home Assistant Labs integration."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, TypedDict
from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from homeassistant.helpers.storage import Store
DOMAIN = "labs"
STORAGE_KEY = "core.labs"
STORAGE_VERSION = 1
EVENT_LABS_UPDATED = "labs_updated"
class EventLabsUpdatedData(TypedDict):
"""Event data for labs_updated event."""
domain: str
preview_feature: str
enabled: bool
@dataclass(frozen=True, kw_only=True, slots=True)
class LabPreviewFeature:
"""Lab preview feature definition."""
domain: str
preview_feature: str
is_built_in: bool = True
feedback_url: str | None = None
learn_more_url: str | None = None
report_issue_url: str | None = None
@property
def full_key(self) -> str:
"""Return the full key for the preview feature (domain.preview_feature)."""
return f"{self.domain}.{self.preview_feature}"
def to_dict(self, enabled: bool) -> dict[str, str | bool | None]:
"""Return a serialized version of the preview feature.
Args:
enabled: Whether the preview feature is currently enabled
Returns:
Dictionary with preview feature data including enabled status
"""
return {
"preview_feature": self.preview_feature,
"domain": self.domain,
"enabled": enabled,
"is_built_in": self.is_built_in,
"feedback_url": self.feedback_url,
"learn_more_url": self.learn_more_url,
"report_issue_url": self.report_issue_url,
}
type LabsStoreData = dict[str, set[tuple[str, str]]]
@dataclass
class LabsData:
"""Storage class for Labs global data."""
store: Store[LabsStoreData]
data: LabsStoreData
preview_features: dict[str, LabPreviewFeature] = field(default_factory=dict)
LABS_DATA: HassKey[LabsData] = HassKey(DOMAIN)

View File

@@ -1,9 +0,0 @@
{
"domain": "labs",
"name": "Home Assistant Labs",
"codeowners": ["@home-assistant/core"],
"documentation": "https://www.home-assistant.io/integrations/labs",
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal"
}

View File

@@ -1,3 +0,0 @@
{
"title": "Home Assistant Labs"
}

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
from abc import abstractmethod
from asyncio import Task
from dataclasses import dataclass
from datetime import timedelta
import logging
@@ -45,7 +44,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
_default_update_interval = SCAN_INTERVAL
config_entry: LaMarzoccoConfigEntry
_websocket_task: Task | None = None
websocket_terminated = True
def __init__(
self,
@@ -65,13 +64,6 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
self.device = device
self.cloud_client = cloud_client
@property
def websocket_terminated(self) -> bool:
"""Return True if the websocket task is terminated or not running."""
if self._websocket_task is None:
return True
return self._websocket_task.done()
async def _async_update_data(self) -> None:
"""Do the data update."""
try:
@@ -103,14 +95,13 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
# ensure token stays valid; does nothing if token is still valid
await self.cloud_client.async_get_access_token()
# Only skip websocket reconnection if it's currently connected and the task is still running
if self.device.websocket.connected and not self.websocket_terminated:
if self.device.websocket.connected:
return
await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())
self._websocket_task = self.config_entry.async_create_background_task(
self.config_entry.async_create_background_task(
hass=self.hass,
target=self.connect_websocket(),
name="lm_websocket_task",
@@ -129,6 +120,7 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
_LOGGER.debug("Init WebSocket in background task")
self.websocket_terminated = False
self.async_update_listeners()
await self.device.connect_dashboard_websocket(
@@ -137,6 +129,7 @@ class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
disconnect_callback=self.async_update_listeners,
)
self.websocket_terminated = True
self.async_update_listeners()
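The removed lines above keep a handle to the websocket background task so the coordinator can treat "terminated" as "no task, or task done" and skip reconnecting only while the connection is up and the task is still alive. Below is a self-contained asyncio sketch of that pattern; the class and method names are invented, not the La Marzocco implementation.
import asyncio


class WebsocketSupervisor:
    """Track the websocket background task and reconnect when it has died."""

    def __init__(self) -> None:
        self._websocket_task: asyncio.Task | None = None
        self.connected = False

    @property
    def websocket_terminated(self) -> bool:
        return self._websocket_task is None or self._websocket_task.done()

    async def ensure_connected(self) -> None:
        # Mirrors the coordinator guard: only skip reconnecting while the
        # connection is up *and* the background task is still running.
        if self.connected and not self.websocket_terminated:
            return
        self._websocket_task = asyncio.create_task(self._run_websocket())

    async def _run_websocket(self) -> None:
        self.connected = True
        try:
            await asyncio.sleep(3600)  # placeholder for the real websocket loop
        finally:
            self.connected = False


async def main() -> None:
    supervisor = WebsocketSupervisor()
    await supervisor.ensure_connected()
    print(supervisor.websocket_terminated)  # False while the task is alive


asyncio.run(main())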

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from functools import partial
import logging
from typing import cast
import pypck
from pypck.connection import (
@@ -49,6 +48,7 @@ from .const import (
)
from .helpers import (
AddressType,
InputType,
LcnConfigEntry,
LcnRuntimeData,
async_update_config_entry,
@@ -285,7 +285,7 @@ def _async_fire_access_control_event(
hass: HomeAssistant,
device: dr.DeviceEntry | None,
address: AddressType,
inp: pypck.inputs.ModStatusAccessControl,
inp: InputType,
) -> None:
"""Fire access control event (transponder, transmitter, fingerprint, codelock)."""
event_data = {
@@ -299,11 +299,7 @@ def _async_fire_access_control_event(
if inp.periphery == pypck.lcn_defs.AccessControlPeriphery.TRANSMITTER:
event_data.update(
{
"level": inp.level,
"key": inp.key,
"action": cast(pypck.lcn_defs.KeyAction, inp.action).value,
}
{"level": inp.level, "key": inp.key, "action": inp.action.value}
)
event_name = f"lcn_{inp.periphery.value.lower()}"
@@ -314,7 +310,7 @@ def _async_fire_send_keys_event(
hass: HomeAssistant,
device: dr.DeviceEntry | None,
address: AddressType,
inp: pypck.inputs.ModSendKeysHost,
inp: InputType,
) -> None:
"""Fire send_keys event."""
for table, action in enumerate(inp.actions):

View File

@@ -100,6 +100,8 @@ class LcnClimate(LcnEntity, ClimateEntity):
self._max_temp = config[CONF_DOMAIN_DATA][CONF_MAX_TEMP]
self._min_temp = config[CONF_DOMAIN_DATA][CONF_MIN_TEMP]
self._current_temperature = None
self._target_temperature = None
self._is_on = True
self._attr_hvac_modes = [HVACMode.HEAT]
@@ -119,6 +121,16 @@ class LcnClimate(LcnEntity, ClimateEntity):
return UnitOfTemperature.FAHRENHEIT
return UnitOfTemperature.CELSIUS
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
return self._current_temperature
@property
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation ie. heat, cool mode.
@@ -154,7 +166,7 @@ class LcnClimate(LcnEntity, ClimateEntity):
):
return
self._is_on = False
self._attr_target_temperature = None
self._target_temperature = None
self.async_write_ha_state()
async def async_set_temperature(self, **kwargs: Any) -> None:
@@ -166,7 +178,7 @@ class LcnClimate(LcnEntity, ClimateEntity):
self.setpoint, temperature, self.unit
):
return
self._attr_target_temperature = temperature
self._target_temperature = temperature
self.async_write_ha_state()
async def async_update(self) -> None:
@@ -186,14 +198,10 @@ class LcnClimate(LcnEntity, ClimateEntity):
return
if input_obj.get_var() == self.variable:
self._attr_current_temperature = float(
input_obj.get_value().to_var_unit(self.unit)
)
self._current_temperature = input_obj.get_value().to_var_unit(self.unit)
elif input_obj.get_var() == self.setpoint:
self._is_on = not input_obj.get_value().is_locked_regulator()
if self._is_on:
self._attr_target_temperature = float(
input_obj.get_value().to_var_unit(self.unit)
)
self._target_temperature = input_obj.get_value().to_var_unit(self.unit)
self.async_write_ha_state()

View File

@@ -120,7 +120,7 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
errors={CONF_BASE: error},
)
data: dict[str, Any] = {
data: dict = {
**user_input,
CONF_DEVICES: [],
CONF_ENTITIES: [],

View File

@@ -1,7 +1,7 @@
"""Support for LCN covers."""
import asyncio
from collections.abc import Coroutine, Iterable
from collections.abc import Iterable
from datetime import timedelta
from functools import partial
from typing import Any
@@ -81,8 +81,6 @@ class LcnOutputsCover(LcnEntity, CoverEntity):
_attr_is_opening = False
_attr_assumed_state = True
reverse_time: pypck.lcn_defs.MotorReverseTime | None
def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
"""Initialize the LCN cover."""
super().__init__(config, config_entry)
@@ -257,15 +255,7 @@ class LcnRelayCover(LcnEntity, CoverEntity):
async def async_update(self) -> None:
"""Update the state of the entity."""
coros: list[
Coroutine[
Any,
Any,
pypck.inputs.ModStatusRelays
| pypck.inputs.ModStatusMotorPositionBS4
| None,
]
] = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
coros = [self.device_connection.request_status_relays(SCAN_INTERVAL.seconds)]
if self.positioning_mode == pypck.lcn_defs.MotorPositioningMode.BS4:
coros.append(
self.device_connection.request_status_motor_position(
@@ -293,7 +283,7 @@ class LcnRelayCover(LcnEntity, CoverEntity):
)
and input_obj.motor == self.motor.value
):
self._attr_current_cover_position = int(input_obj.position)
self._attr_current_cover_position = input_obj.position
if self._attr_current_cover_position in [0, 100]:
self._attr_is_opening = False
self._attr_is_closing = False

View File

@@ -2,8 +2,6 @@
from collections.abc import Callable
from pypck.device import DeviceConnection
from homeassistant.const import CONF_ADDRESS, CONF_DOMAIN, CONF_NAME
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
@@ -12,6 +10,7 @@ from homeassistant.helpers.typing import ConfigType
from .const import CONF_DOMAIN_DATA, DOMAIN
from .helpers import (
AddressType,
DeviceConnectionType,
InputType,
LcnConfigEntry,
generate_unique_id,
@@ -24,7 +23,7 @@ class LcnEntity(Entity):
"""Parent class for all entities associated with the LCN component."""
_attr_has_entity_name = True
device_connection: DeviceConnection
device_connection: DeviceConnectionType
def __init__(
self,
@@ -35,7 +34,7 @@ class LcnEntity(Entity):
self.config = config
self.config_entry = config_entry
self.address: AddressType = config[CONF_ADDRESS]
self._unregister_for_inputs: Callable[[], None] | None = None
self._unregister_for_inputs: Callable | None = None
self._name: str = config[CONF_NAME]
self._attr_device_info = DeviceInfo(
identifiers={

View File

@@ -11,7 +11,6 @@ from typing import cast
import pypck
from pypck.connection import PchkConnectionManager
from pypck.device import DeviceConnection
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -49,7 +48,7 @@ class LcnRuntimeData:
connection: PchkConnectionManager
"""Connection to PCHK host."""
device_connections: dict[str, DeviceConnection]
device_connections: dict[str, DeviceConnectionType]
"""Logical addresses of devices connected to the host."""
add_entities_callbacks: dict[str, Callable[[Iterable[ConfigType]], None]]
@@ -60,8 +59,9 @@ class LcnRuntimeData:
type LcnConfigEntry = ConfigEntry[LcnRuntimeData]
type AddressType = tuple[int, int, bool]
type DeviceConnectionType = pypck.module.ModuleConnection | pypck.module.GroupConnection
type InputType = pypck.inputs.Input
type InputType = type[pypck.inputs.Input]
# Regex for address validation
PATTERN_ADDRESS = re.compile(
@@ -82,11 +82,11 @@ DOMAIN_LOOKUP = {
def get_device_connection(
hass: HomeAssistant, address: AddressType, config_entry: LcnConfigEntry
) -> DeviceConnection:
) -> DeviceConnectionType:
"""Return a lcn device_connection."""
host_connection = config_entry.runtime_data.connection
addr = pypck.lcn_addr.LcnAddr(*address)
return host_connection.get_device_connection(addr)
return host_connection.get_address_conn(addr)
def get_resource(domain_name: str, domain_data: ConfigType) -> str:
@@ -246,7 +246,7 @@ def register_lcn_address_devices(
async def async_update_device_config(
device_connection: DeviceConnection, device_config: ConfigType
device_connection: DeviceConnectionType, device_config: ConfigType
) -> None:
"""Fill missing values in device_config with infos from LCN bus."""
# fetch serial info if device is module
@@ -269,10 +269,10 @@ async def async_update_device_config(
if device_config[CONF_NAME] != "":
return
device_name: str | None = None
device_name = ""
if not is_group:
device_name = await device_connection.request_name()
if is_group or device_name is None:
if is_group or device_name == "":
module_type = "Group" if is_group else "Module"
device_name = (
f"{module_type} "

View File

@@ -9,5 +9,5 @@
"iot_class": "local_polling",
"loggers": ["pypck"],
"quality_scale": "bronze",
"requirements": ["pypck==0.9.5", "lcn-frontend==0.2.7"]
"requirements": ["pypck==0.9.2", "lcn-frontend==0.2.7"]
}

View File

@@ -74,4 +74,4 @@ rules:
status: exempt
comment: |
Integration is not making any HTTP requests.
strict-typing: done
strict-typing: todo

View File

@@ -156,8 +156,6 @@ class LcnVariableSensor(LcnEntity, SensorEntity):
class LcnLedLogicSensor(LcnEntity, SensorEntity):
"""Representation of a LCN sensor for leds and logicops."""
source: pypck.lcn_defs.LedPort | pypck.lcn_defs.LogicOpPort
def __init__(self, config: ConfigType, config_entry: LcnConfigEntry) -> None:
"""Initialize the LCN sensor."""
super().__init__(config, config_entry)

View File

@@ -3,7 +3,6 @@
from enum import StrEnum, auto
import pypck
from pypck.device import DeviceConnection
import voluptuous as vol
from homeassistant.const import (
@@ -49,7 +48,7 @@ from .const import (
VAR_UNITS,
VARIABLES,
)
from .helpers import LcnConfigEntry, is_states_string
from .helpers import DeviceConnectionType, LcnConfigEntry, is_states_string
class LcnServiceCall:
@@ -66,7 +65,7 @@ class LcnServiceCall:
"""Initialize service call."""
self.hass = hass
def get_device_connection(self, service: ServiceCall) -> DeviceConnection:
def get_device_connection(self, service: ServiceCall) -> DeviceConnectionType:
"""Get address connection object."""
entries: list[LcnConfigEntry] = self.hass.config_entries.async_loaded_entries(
DOMAIN

View File

@@ -7,7 +7,6 @@ from functools import wraps
from typing import Any, Final
import lcn_frontend as lcn_panel
from pypck.device import DeviceConnection
import voluptuous as vol
from homeassistant.components import panel_custom, websocket_api
@@ -38,6 +37,7 @@ from .const import (
DOMAIN,
)
from .helpers import (
DeviceConnectionType,
LcnConfigEntry,
async_update_device_config,
generate_unique_id,
@@ -104,9 +104,7 @@ def get_config_entry(
@wraps(func)
async def get_entry(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""Get config_entry."""
if not (config_entry := hass.config_entries.async_get_entry(msg["entry_id"])):
@@ -126,7 +124,7 @@ def get_config_entry(
async def websocket_get_device_configs(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Get device configs."""
@@ -146,7 +144,7 @@ async def websocket_get_device_configs(
async def websocket_get_entity_configs(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Get entities configs."""
@@ -177,14 +175,14 @@ async def websocket_get_entity_configs(
async def websocket_scan_devices(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Scan for new devices."""
host_connection = config_entry.runtime_data.connection
await host_connection.scan_modules()
for device_connection in host_connection.device_connections.values():
for device_connection in host_connection.address_conns.values():
if not device_connection.is_group:
await async_create_or_update_device_in_config_entry(
hass, device_connection, config_entry
@@ -209,7 +207,7 @@ async def websocket_scan_devices(
async def websocket_add_device(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Add a device."""
@@ -255,7 +253,7 @@ async def websocket_add_device(
async def websocket_delete_device(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Delete a device."""
@@ -317,7 +315,7 @@ async def websocket_delete_device(
async def websocket_add_entity(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Add an entity."""
@@ -383,7 +381,7 @@ async def websocket_add_entity(
async def websocket_delete_entity(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
msg: dict,
config_entry: LcnConfigEntry,
) -> None:
"""Delete an entity."""
@@ -423,7 +421,7 @@ async def websocket_delete_entity(
async def async_create_or_update_device_in_config_entry(
hass: HomeAssistant,
device_connection: DeviceConnection,
device_connection: DeviceConnectionType,
config_entry: LcnConfigEntry,
) -> None:
"""Create or update device in config_entry according to given device_connection."""
@@ -453,7 +451,7 @@ async def async_create_or_update_device_in_config_entry(
def get_entity_entry(
hass: HomeAssistant, entity_config: dict[str, Any], config_entry: LcnConfigEntry
hass: HomeAssistant, entity_config: dict, config_entry: LcnConfigEntry
) -> er.RegistryEntry | None:
"""Get entity RegistryEntry from entity_config."""
entity_registry = er.async_get(hass)

View File

@@ -9,7 +9,7 @@
},
"iot_class": "local_push",
"loggers": ["pylutron_caseta"],
"requirements": ["pylutron-caseta==0.26.0"],
"requirements": ["pylutron-caseta==0.25.0"],
"zeroconf": [
{
"properties": {

View File

@@ -58,7 +58,7 @@ DISCOVERY_SCHEMAS = [
platform=Platform.BUTTON,
entity_description=MatterButtonEntityDescription(
key="IdentifyButton",
entity_category=EntityCategory.DIAGNOSTIC,
entity_category=EntityCategory.CONFIG,
device_class=ButtonDeviceClass.IDENTIFY,
command=lambda: clusters.Identify.Commands.Identify(identifyTime=15),
),

View File

@@ -183,13 +183,6 @@ PUMP_CONTROL_MODE_MAP = {
clusters.PumpConfigurationAndControl.Enums.ControlModeEnum.kUnknownEnumValue: None,
}
SETPOINT_CHANGE_SOURCE_MAP = {
clusters.Thermostat.Enums.SetpointChangeSourceEnum.kManual: "manual",
clusters.Thermostat.Enums.SetpointChangeSourceEnum.kSchedule: "schedule",
clusters.Thermostat.Enums.SetpointChangeSourceEnum.kExternal: "external",
clusters.Thermostat.Enums.SetpointChangeSourceEnum.kUnknownEnumValue: None,
}
HUMIDITY_SCALING_FACTOR = 100
TEMPERATURE_SCALING_FACTOR = 100
@@ -1495,47 +1488,4 @@ DISCOVERY_SCHEMAS = [
entity_class=MatterSensor,
required_attributes=(clusters.ServiceArea.Attributes.EstimatedEndTime,),
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=MatterSensorEntityDescription(
key="SetpointChangeSource",
translation_key="setpoint_change_source",
device_class=SensorDeviceClass.ENUM,
state_class=None,
# convert to set first to remove the duplicate unknown value
options=[x for x in SETPOINT_CHANGE_SOURCE_MAP.values() if x is not None],
device_to_ha=lambda x: SETPOINT_CHANGE_SOURCE_MAP[x],
),
entity_class=MatterSensor,
required_attributes=(clusters.Thermostat.Attributes.SetpointChangeSource,),
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=MatterSensorEntityDescription(
key="SetpointChangeSourceTimestamp",
translation_key="setpoint_change_timestamp",
device_class=SensorDeviceClass.TIMESTAMP,
state_class=None,
device_to_ha=(lambda x: dt_util.utc_from_timestamp(x) if x > 0 else None),
),
entity_class=MatterSensor,
required_attributes=(
clusters.Thermostat.Attributes.SetpointChangeSourceTimestamp,
),
),
MatterDiscoverySchema(
platform=Platform.SENSOR,
entity_description=MatterSensorEntityDescription(
key="ThermostatSetpointChangeAmount",
translation_key="setpoint_change_amount",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=1,
device_class=SensorDeviceClass.TEMPERATURE,
device_to_ha=lambda x: x / TEMPERATURE_SCALING_FACTOR,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,
required_attributes=(clusters.Thermostat.Attributes.SetpointChangeAmount,),
device_type=(device_types.Thermostat,),
),
]
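The removed discovery schemas above map Matter enum values to Home Assistant states: the unknown enum value maps to None and is filtered out of the options list, while a device_to_ha callable resolves raw values through the map. A standalone sketch of that mapping follows; the enum here is a stand-in for the Matter cluster enum, not the real chip clusters module.
from enum import IntEnum


class SetpointChangeSource(IntEnum):
    """Stand-in for clusters.Thermostat.Enums.SetpointChangeSourceEnum."""

    MANUAL = 0
    SCHEDULE = 1
    EXTERNAL = 2
    UNKNOWN = 255


SOURCE_MAP: dict[SetpointChangeSource, str | None] = {
    SetpointChangeSource.MANUAL: "manual",
    SetpointChangeSource.SCHEDULE: "schedule",
    SetpointChangeSource.EXTERNAL: "external",
    SetpointChangeSource.UNKNOWN: None,
}

# Options exposed to the UI: drop the None used for the unknown value.
options = [value for value in SOURCE_MAP.values() if value is not None]


def device_to_ha(raw: SetpointChangeSource) -> str | None:
    """Resolve a raw device value through the map."""
    return SOURCE_MAP[raw]


print(options, device_to_ha(SetpointChangeSource.SCHEDULE))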

View File

@@ -223,9 +223,6 @@
"pump_setpoint": {
"name": "Setpoint"
},
"setpoint_change_source_timestamp": {
"name": "Last change"
},
"temperature_offset": {
"name": "Temperature offset"
},
@@ -521,20 +518,6 @@
"rms_voltage": {
"name": "Effective voltage"
},
"setpoint_change_amount": {
"name": "Last change amount"
},
"setpoint_change_source": {
"name": "Last change source",
"state": {
"external": "External",
"manual": "Manual",
"schedule": "Schedule"
}
},
"setpoint_change_timestamp": {
"name": "Last change"
},
"switch_current_position": {
"name": "Current switch position"
},

File diff suppressed because it is too large

View File

@@ -36,8 +36,8 @@ from .const import (
COFFEE_SYSTEM_PROFILE,
DISABLED_TEMP_ENTITIES,
DOMAIN,
PROGRAM_IDS,
PROGRAM_PHASE,
STATE_PROGRAM_ID,
STATE_STATUS_TAGS,
MieleAppliance,
PlatePowerStep,
@@ -979,16 +979,21 @@ class MieleProgramIdSensor(MieleSensor):
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return (
PROGRAM_IDS[self.device.device_type](self.device.state_program_id).name
if self.device.device_type in PROGRAM_IDS
else None
ret_val = STATE_PROGRAM_ID.get(self.device.device_type, {}).get(
self.device.state_program_id
)
if ret_val is None:
_LOGGER.debug(
"Unknown program id: %s on device type: %s",
self.device.state_program_id,
self.device.device_type,
)
return ret_val
@property
def options(self) -> list[str]:
"""Return the options list for the actual device type."""
return sorted(PROGRAM_IDS.get(self.device.device_type, {}).keys())
return sorted(set(STATE_PROGRAM_ID.get(self.device.device_type, {}).values()))
class MieleTimeSensor(MieleRestorableSensor):

View File

@@ -430,7 +430,7 @@
"custom_program_9": "Custom program 9",
"dark_garments": "Dark garments",
"dark_mixed_grain_bread": "Dark mixed grain bread",
"decrystallise_honey": "Decrystallize honey",
"decrystallise_honey": "Decrystallise honey",
"defrost": "Defrost",
"defrosting_with_microwave": "Defrosting with microwave",
"defrosting_with_steam": "Defrosting with steam",

View File

@@ -41,11 +41,9 @@ from .const import (
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_DEVICES,
DATA_PENDING_UPDATES,
DATA_PUSH_CHANNEL,
DATA_STORE,
DOMAIN,
SENSOR_TYPES,
STORAGE_KEY,
STORAGE_VERSION,
)
@@ -77,7 +75,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
DATA_DEVICES: {},
DATA_PUSH_CHANNEL: {},
DATA_STORE: store,
DATA_PENDING_UPDATES: {sensor_type: {} for sensor_type in SENSOR_TYPES},
}
hass.http.register_view(RegistrationsView())

View File

@@ -4,7 +4,7 @@ from typing import Any
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON, STATE_UNKNOWN
from homeassistant.const import CONF_WEBHOOK_ID, STATE_ON
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -75,9 +75,8 @@ class MobileAppBinarySensor(MobileAppEntity, BinarySensorEntity):
async def async_restore_last_state(self, last_state: State) -> None:
"""Restore previous state."""
if self._config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
await super().async_restore_last_state(last_state)
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
await super().async_restore_last_state(last_state)
self._config[ATTR_SENSOR_STATE] = last_state.state == STATE_ON
self._async_update_attr_from_config()
@callback

View File

@@ -20,7 +20,6 @@ DATA_DEVICES = "devices"
DATA_STORE = "store"
DATA_NOTIFY = "notify"
DATA_PUSH_CHANNEL = "push_channel"
DATA_PENDING_UPDATES = "pending_updates"
ATTR_APP_DATA = "app_data"
ATTR_APP_ID = "app_id"
@@ -95,5 +94,3 @@ SCHEMA_APP_DATA = vol.Schema(
},
extra=vol.ALLOW_EXTRA,
)
SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)

View File

@@ -2,16 +2,10 @@
from __future__ import annotations
import logging
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_ICON,
CONF_NAME,
CONF_UNIQUE_ID,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.const import ATTR_ICON, CONF_NAME, CONF_UNIQUE_ID, STATE_UNAVAILABLE
from homeassistant.core import State, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
@@ -24,15 +18,10 @@ from .const import (
ATTR_SENSOR_ICON,
ATTR_SENSOR_STATE,
ATTR_SENSOR_STATE_CLASS,
ATTR_SENSOR_TYPE,
DATA_PENDING_UPDATES,
DOMAIN,
SIGNAL_SENSOR_UPDATE,
)
from .helpers import device_info
_LOGGER = logging.getLogger(__name__)
class MobileAppEntity(RestoreEntity):
"""Representation of a mobile app entity."""
@@ -67,14 +56,11 @@ class MobileAppEntity(RestoreEntity):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SIGNAL_SENSOR_UPDATE}-{self._config[ATTR_SENSOR_TYPE]}-{self._attr_unique_id}",
f"{SIGNAL_SENSOR_UPDATE}-{self._attr_unique_id}",
self._handle_update,
)
)
# Apply any pending updates
self._handle_update()
if (state := await self.async_get_last_state()) is None:
return
@@ -83,16 +69,13 @@ class MobileAppEntity(RestoreEntity):
async def async_restore_last_state(self, last_state: State) -> None:
"""Restore previous state."""
config = self._config
# Only restore state if we don't have one already, since it can be set by a pending update
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
config[ATTR_SENSOR_STATE] = last_state.state
config[ATTR_SENSOR_ATTRIBUTES] = {
**last_state.attributes,
**self._config[ATTR_SENSOR_ATTRIBUTES],
}
if ATTR_ICON in last_state.attributes:
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
config[ATTR_SENSOR_STATE] = last_state.state
config[ATTR_SENSOR_ATTRIBUTES] = {
**last_state.attributes,
**self._config[ATTR_SENSOR_ATTRIBUTES],
}
if ATTR_ICON in last_state.attributes:
config[ATTR_SENSOR_ICON] = last_state.attributes[ATTR_ICON]
@property
def device_info(self):
@@ -100,21 +83,8 @@ class MobileAppEntity(RestoreEntity):
return device_info(self._registration)
@callback
def _handle_update(self) -> None:
def _handle_update(self, data: dict[str, Any]) -> None:
"""Handle async event updates."""
self._apply_pending_update()
self._config.update(data)
self._async_update_attr_from_config()
self.async_write_ha_state()
def _apply_pending_update(self) -> None:
"""Restore any pending update for this entity."""
entity_type = self._config[ATTR_SENSOR_TYPE]
pending_updates = self.hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type]
if update := pending_updates.pop(self._attr_unique_id, None):
_LOGGER.debug(
"Applying pending update for %s: %s",
self._attr_unique_id,
update,
)
# Apply the pending update
self._config.update(update)
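The removed code above parks webhook sensor updates per entity type and unique ID so an entity that is added or re-enabled later can pick up the latest value on initialization. Below is a framework-free sketch of that pending-update flow; the dictionary layout and names are illustrative, not the mobile_app data structures.
from typing import Any

PENDING_UPDATES: dict[str, dict[str, dict[str, Any]]] = {
    "sensor": {},
    "binary_sensor": {},
}


def store_update(entity_type: str, unique_id: str, data: dict[str, Any]) -> None:
    """Replace any older pending data for the same entity with the latest."""
    PENDING_UPDATES[entity_type][unique_id] = data


def apply_pending(entity_type: str, unique_id: str, config: dict[str, Any]) -> None:
    """Pop the pending entry, if any, and merge it into the entity config."""
    if update := PENDING_UPDATES[entity_type].pop(unique_id, None):
        config.update(update)


config: dict[str, Any] = {"state": None}
store_update("sensor", "webhook123_battery_level", {"state": 87})
apply_pending("sensor", "webhook123_battery_level", config)
print(config)  # {'state': 87}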

View File

@@ -86,26 +86,24 @@ class MobileAppSensor(MobileAppEntity, RestoreSensor):
async def async_restore_last_state(self, last_state: State) -> None:
"""Restore previous state."""
await super().async_restore_last_state(last_state)
config = self._config
if config[ATTR_SENSOR_STATE] in (None, STATE_UNKNOWN):
await super().async_restore_last_state(last_state)
if not (last_sensor_data := await self.async_get_last_sensor_data()):
# Workaround to handle migration to RestoreSensor, can be removed
# in HA Core 2023.4
config[ATTR_SENSOR_STATE] = None
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
if TYPE_CHECKING:
assert self.unique_id is not None
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
if (
self.device_class == SensorDeviceClass.TEMPERATURE
and sensor_unique_id == "battery_temperature"
):
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
else:
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
if not (last_sensor_data := await self.async_get_last_sensor_data()):
# Workaround to handle migration to RestoreSensor, can be removed
# in HA Core 2023.4
config[ATTR_SENSOR_STATE] = None
webhook_id = self._entry.data[CONF_WEBHOOK_ID]
if TYPE_CHECKING:
assert self.unique_id is not None
sensor_unique_id = _extract_sensor_unique_id(webhook_id, self.unique_id)
if (
self.device_class == SensorDeviceClass.TEMPERATURE
and sensor_unique_id == "battery_temperature"
):
config[ATTR_SENSOR_UOM] = UnitOfTemperature.CELSIUS
else:
config[ATTR_SENSOR_STATE] = last_sensor_data.native_value
config[ATTR_SENSOR_UOM] = last_sensor_data.native_unit_of_measurement
self._async_update_attr_from_config()

View File

@@ -79,6 +79,7 @@ from .const import (
ATTR_SENSOR_STATE,
ATTR_SENSOR_STATE_CLASS,
ATTR_SENSOR_TYPE,
ATTR_SENSOR_TYPE_BINARY_SENSOR,
ATTR_SENSOR_TYPE_SENSOR,
ATTR_SENSOR_UNIQUE_ID,
ATTR_SENSOR_UOM,
@@ -97,14 +98,12 @@ from .const import (
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_DEVICES,
DATA_PENDING_UPDATES,
DOMAIN,
ERR_ENCRYPTION_ALREADY_ENABLED,
ERR_ENCRYPTION_REQUIRED,
ERR_INVALID_FORMAT,
ERR_SENSOR_NOT_REGISTERED,
SCHEMA_APP_DATA,
SENSOR_TYPES,
SIGNAL_LOCATION_UPDATE,
SIGNAL_SENSOR_UPDATE,
)
@@ -126,6 +125,8 @@ WEBHOOK_COMMANDS: Registry[
str, Callable[[HomeAssistant, ConfigEntry, Any], Coroutine[Any, Any, Response]]
] = Registry()
SENSOR_TYPES = (ATTR_SENSOR_TYPE_BINARY_SENSOR, ATTR_SENSOR_TYPE_SENSOR)
WEBHOOK_PAYLOAD_SCHEMA = vol.Any(
vol.Schema(
{
@@ -600,16 +601,14 @@ async def webhook_register_sensor(
if changes:
entity_registry.async_update_entity(existing_sensor, **changes)
_async_update_sensor_entity(
hass, entity_type=entity_type, unique_store_key=unique_store_key, data=data
)
async_dispatcher_send(hass, f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}", data)
else:
data[CONF_UNIQUE_ID] = unique_store_key
data[CONF_NAME] = (
f"{config_entry.data[ATTR_DEVICE_NAME]} {data[ATTR_SENSOR_NAME]}"
)
register_signal = f"{DOMAIN}_{entity_type}_register"
register_signal = f"{DOMAIN}_{data[ATTR_SENSOR_TYPE]}_register"
async_dispatcher_send(hass, register_signal, data)
return webhook_response(
@@ -686,12 +685,10 @@ async def webhook_update_sensor_states(
continue
sensor[CONF_WEBHOOK_ID] = config_entry.data[CONF_WEBHOOK_ID]
_async_update_sensor_entity(
async_dispatcher_send(
hass,
entity_type=entity_type,
unique_store_key=unique_store_key,
data=sensor,
f"{SIGNAL_SENSOR_UPDATE}-{unique_store_key}",
sensor,
)
resp[unique_id] = {"success": True}
@@ -700,26 +697,11 @@ async def webhook_update_sensor_states(
entry = entity_registry.async_get(entity_id)
if entry and entry.disabled_by:
# Inform the app that the entity is disabled
resp[unique_id]["is_disabled"] = True
return webhook_response(resp, registration=config_entry.data)
def _async_update_sensor_entity(
hass: HomeAssistant, entity_type: str, unique_store_key: str, data: dict[str, Any]
) -> None:
"""Update a sensor entity with new data."""
# Replace existing pending update with the latest sensor data.
hass.data[DOMAIN][DATA_PENDING_UPDATES][entity_type][unique_store_key] = data
# The signal might not be handled if the entity was just enabled, but the data is stored
# in pending updates and will be applied on entity initialization.
async_dispatcher_send(
hass, f"{SIGNAL_SENSOR_UPDATE}-{entity_type}-{unique_store_key}"
)
@WEBHOOK_COMMANDS.register("get_zones")
async def webhook_get_zones(
hass: HomeAssistant, config_entry: ConfigEntry, data: Any

View File

@@ -239,7 +239,6 @@ from .const import (
CONF_OSCILLATION_COMMAND_TOPIC,
CONF_OSCILLATION_STATE_TOPIC,
CONF_OSCILLATION_VALUE_TEMPLATE,
CONF_PATTERN,
CONF_PAYLOAD_ARM_AWAY,
CONF_PAYLOAD_ARM_CUSTOM_BYPASS,
CONF_PAYLOAD_ARM_HOME,
@@ -466,7 +465,6 @@ SUBENTRY_PLATFORMS = [
Platform.SENSOR,
Platform.SIREN,
Platform.SWITCH,
Platform.TEXT,
]
_CODE_VALIDATION_MODE = {
@@ -821,16 +819,6 @@ TEMPERATURE_UNIT_SELECTOR = SelectSelector(
mode=SelectSelectorMode.DROPDOWN,
)
)
TEXT_MODE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[TextSelectorType.TEXT.value, TextSelectorType.PASSWORD.value],
mode=SelectSelectorMode.DROPDOWN,
translation_key="text_mode",
)
)
TEXT_SIZE_SELECTOR = NumberSelector(
NumberSelectorConfig(min=0, max=255, step=1, mode=NumberSelectorMode.BOX)
)
@callback
@@ -1163,22 +1151,6 @@ def validate_sensor_platform_config(
return errors
@callback
def validate_text_platform_config(
config: dict[str, Any],
) -> dict[str, str]:
"""Validate the text entity options."""
errors: dict[str, str] = {}
if (
CONF_MIN in config
and CONF_MAX in config
and config[CONF_MIN] > config[CONF_MAX]
):
errors["text_advanced_settings"] = "max_below_min"
return errors
ENTITY_CONFIG_VALIDATOR: dict[
str,
Callable[[dict[str, Any]], dict[str, str]] | None,
@@ -1198,7 +1170,6 @@ ENTITY_CONFIG_VALIDATOR: dict[
Platform.SENSOR: validate_sensor_platform_config,
Platform.SIREN: None,
Platform.SWITCH: None,
Platform.TEXT: validate_text_platform_config,
}
@@ -1459,7 +1430,6 @@ PLATFORM_ENTITY_FIELDS: dict[Platform, dict[str, PlatformField]] = {
selector=SWITCH_DEVICE_CLASS_SELECTOR, required=False
),
},
Platform.TEXT: {},
}
PLATFORM_MQTT_FIELDS: dict[Platform, dict[str, PlatformField]] = {
Platform.ALARM_CONTROL_PANEL: {
@@ -3328,58 +3298,6 @@ PLATFORM_MQTT_FIELDS: dict[Platform, dict[str, PlatformField]] = {
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
CONF_OPTIMISTIC: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.TEXT: {
CONF_COMMAND_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=True,
validator=valid_publish_topic,
error="invalid_publish_topic",
),
CONF_COMMAND_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=valid_subscribe_topic,
error="invalid_subscribe_topic",
),
CONF_VALUE_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
CONF_MIN: PlatformField(
selector=TEXT_SIZE_SELECTOR,
required=True,
default=0,
section="text_advanced_settings",
),
CONF_MAX: PlatformField(
selector=TEXT_SIZE_SELECTOR,
required=True,
default=255,
section="text_advanced_settings",
),
CONF_MODE: PlatformField(
selector=TEXT_MODE_SELECTOR,
required=True,
default=TextSelectorType.TEXT.value,
section="text_advanced_settings",
),
CONF_PATTERN: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=validate(cv.is_regex),
error="invalid_regular_expression",
section="text_advanced_settings",
),
},
}
MQTT_DEVICE_PLATFORM_FIELDS = {
ATTR_NAME: PlatformField(selector=TEXT_SELECTOR, required=True),

View File

@@ -138,7 +138,6 @@ CONF_OSCILLATION_COMMAND_TOPIC = "oscillation_command_topic"
CONF_OSCILLATION_COMMAND_TEMPLATE = "oscillation_command_template"
CONF_OSCILLATION_STATE_TOPIC = "oscillation_state_topic"
CONF_OSCILLATION_VALUE_TEMPLATE = "oscillation_value_template"
CONF_PATTERN = "pattern"
CONF_PAYLOAD_ARM_AWAY = "payload_arm_away"
CONF_PAYLOAD_ARM_CUSTOM_BYPASS = "payload_arm_custom_bypass"
CONF_PAYLOAD_ARM_HOME = "payload_arm_home"

View File

@@ -970,21 +970,6 @@
"temperature_state_topic": "The MQTT topic to subscribe for changes of the target temperature. [Learn more.]({url}#temperature_state_topic)"
},
"name": "Target temperature settings"
},
"text_advanced_settings": {
"data": {
"max": "Maximum length",
"min": "Mininum length",
"mode": "Mode",
"pattern": "Pattern"
},
"data_description": {
"max": "Maximum length of the text input",
"min": "Mininum length of the text input",
"mode": "Mode of the text input",
"pattern": "A valid regex pattern"
},
"name": "Advanced text settings"
}
},
"title": "Configure MQTT device \"{mqtt_device}\""
@@ -1402,8 +1387,7 @@
"select": "[%key:component::select::title%]",
"sensor": "[%key:component::sensor::title%]",
"siren": "[%key:component::siren::title%]",
"switch": "[%key:component::switch::title%]",
"text": "[%key:component::text::title%]"
"switch": "[%key:component::switch::title%]"
}
},
"set_ca_cert": {
@@ -1440,12 +1424,6 @@
"none": "No target temperature",
"single": "Single target temperature"
}
},
"text_mode": {
"options": {
"password": "[%key:common::config_flow::data::password%]",
"text": "[%key:component::text::entity_component::_::state_attributes::mode::state::text%]"
}
}
},
"services": {

View File

@@ -27,14 +27,7 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType
from . import subscription
from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
CONF_MAX,
CONF_MIN,
CONF_PATTERN,
CONF_STATE_TOPIC,
)
from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_STATE_TOPIC
from .entity import MqttEntity, async_setup_entity_entry_helper
from .models import (
MqttCommandTemplate,
@@ -49,7 +42,12 @@ _LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
CONF_MAX = "max"
CONF_MIN = "min"
CONF_PATTERN = "pattern"
DEFAULT_NAME = "MQTT Text"
DEFAULT_PAYLOAD_RESET = "None"
MQTT_TEXT_ATTRIBUTES_BLOCKED = frozenset(
{

View File

@@ -27,8 +27,7 @@ from homeassistant.helpers.issue_registry import (
)
from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, DOMAIN, LOGGER
from .helpers import get_music_assistant_client
from .services import register_actions
from .services import get_music_assistant_client, register_actions
if TYPE_CHECKING:
from music_assistant_models.event import MassEvent

View File

@@ -4,18 +4,11 @@ from __future__ import annotations
from collections.abc import Callable, Coroutine
import functools
from typing import TYPE_CHECKING, Any
from typing import Any
from music_assistant_models.errors import MusicAssistantError
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
if TYPE_CHECKING:
from music_assistant_client import MusicAssistantClient
from . import MusicAssistantConfigEntry
from homeassistant.exceptions import HomeAssistantError
def catch_musicassistant_error[**_P, _R](
@@ -33,16 +26,3 @@ def catch_musicassistant_error[**_P, _R](
raise HomeAssistantError(error_msg) from err
return wrapper
@callback
def get_music_assistant_client(
hass: HomeAssistant, config_entry_id: str
) -> MusicAssistantClient:
"""Get the Music Assistant client for the given config entry."""
entry: MusicAssistantConfigEntry | None
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError("Entry not found")
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError("Entry not loaded")
return entry.runtime_data.mass

View File

@@ -22,9 +22,11 @@ from music_assistant_models.errors import MediaNotFoundError
from music_assistant_models.event import MassEvent
from music_assistant_models.media_items import ItemMapping, MediaItemType, Track
from music_assistant_models.player_queue import PlayerQueue
import voluptuous as vol
from homeassistant.components import media_source
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_EXTRA,
BrowseMedia,
MediaPlayerDeviceClass,
@@ -39,26 +41,38 @@ from homeassistant.components.media_player import (
async_process_play_media_url,
)
from homeassistant.const import ATTR_NAME, STATE_OFF, Platform
from homeassistant.core import HomeAssistant, ServiceResponse
from homeassistant.core import HomeAssistant, ServiceResponse, SupportsResponse
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
async_get_current_platform,
)
from homeassistant.util.dt import utc_from_timestamp
from . import MusicAssistantConfigEntry
from .const import (
ATTR_ACTIVE,
ATTR_ACTIVE_QUEUE,
ATTR_ALBUM,
ATTR_ANNOUNCE_VOLUME,
ATTR_ARTIST,
ATTR_AUTO_PLAY,
ATTR_CURRENT_INDEX,
ATTR_CURRENT_ITEM,
ATTR_ELAPSED_TIME,
ATTR_ITEMS,
ATTR_MASS_PLAYER_TYPE,
ATTR_MEDIA_ID,
ATTR_MEDIA_TYPE,
ATTR_NEXT_ITEM,
ATTR_QUEUE_ID,
ATTR_RADIO_MODE,
ATTR_REPEAT_MODE,
ATTR_SHUFFLE_ENABLED,
ATTR_SOURCE_PLAYER,
ATTR_URL,
ATTR_USE_PRE_ANNOUNCE,
DOMAIN,
)
from .entity import MusicAssistantEntity
@@ -108,6 +122,11 @@ REPEAT_MODE_MAPPING_TO_HA = {
# UNKNOWN is intentionally not mapped - will return None
}
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
SERVICE_TRANSFER_QUEUE = "transfer_queue"
SERVICE_GET_QUEUE = "get_queue"
async def async_setup_entry(
hass: HomeAssistant,
@@ -124,6 +143,44 @@ async def async_setup_entry(
# register callback to add players when they are discovered
entry.runtime_data.platform_handlers.setdefault(Platform.MEDIA_PLAYER, add_player)
# add platform service for play_media with advanced options
platform = async_get_current_platform()
platform.async_register_entity_service(
SERVICE_PLAY_MEDIA_ADVANCED,
{
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
vol.Optional(ATTR_ARTIST): cv.string,
vol.Optional(ATTR_ALBUM): cv.string,
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
},
"_async_handle_play_media",
)
platform.async_register_entity_service(
SERVICE_PLAY_ANNOUNCEMENT,
{
vol.Required(ATTR_URL): cv.string,
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
},
"_async_handle_play_announcement",
)
platform.async_register_entity_service(
SERVICE_TRANSFER_QUEUE,
{
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
},
"_async_handle_transfer_queue",
)
platform.async_register_entity_service(
SERVICE_GET_QUEUE,
schema=None,
func="_async_handle_get_queue",
supports_response=SupportsResponse.ONLY,
)
class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
"""Representation of MediaPlayerEntity from Music Assistant Player."""

View File

@@ -4,13 +4,10 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from music_assistant_models.enums import MediaType, QueueOption
from music_assistant_models.enums import MediaType
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
DOMAIN as MEDIA_PLAYER_DOMAIN,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import (
HomeAssistant,
@@ -20,41 +17,31 @@ from homeassistant.core import (
callback,
)
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers import config_validation as cv
from .const import (
ATTR_ALBUM,
ATTR_ALBUM_ARTISTS_ONLY,
ATTR_ALBUM_TYPE,
ATTR_ALBUMS,
ATTR_ANNOUNCE_VOLUME,
ATTR_ARTIST,
ATTR_ARTISTS,
ATTR_AUDIOBOOKS,
ATTR_AUTO_PLAY,
ATTR_FAVORITE,
ATTR_ITEMS,
ATTR_LIBRARY_ONLY,
ATTR_LIMIT,
ATTR_MEDIA_ID,
ATTR_MEDIA_TYPE,
ATTR_OFFSET,
ATTR_ORDER_BY,
ATTR_PLAYLISTS,
ATTR_PODCASTS,
ATTR_RADIO,
ATTR_RADIO_MODE,
ATTR_SEARCH,
ATTR_SEARCH_ALBUM,
ATTR_SEARCH_ARTIST,
ATTR_SEARCH_NAME,
ATTR_SOURCE_PLAYER,
ATTR_TRACKS,
ATTR_URL,
ATTR_USE_PRE_ANNOUNCE,
DOMAIN,
)
from .helpers import get_music_assistant_client
from .schemas import (
LIBRARY_RESULTS_SCHEMA,
SEARCH_RESULT_SCHEMA,
@@ -62,6 +49,7 @@ from .schemas import (
)
if TYPE_CHECKING:
from music_assistant_client import MusicAssistantClient
from music_assistant_models.media_items import (
Album,
Artist,
@@ -72,18 +60,28 @@ if TYPE_CHECKING:
Track,
)
from . import MusicAssistantConfigEntry
SERVICE_SEARCH = "search"
SERVICE_GET_LIBRARY = "get_library"
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
SERVICE_TRANSFER_QUEUE = "transfer_queue"
SERVICE_GET_QUEUE = "get_queue"
DEFAULT_OFFSET = 0
DEFAULT_LIMIT = 25
DEFAULT_SORT_ORDER = "name"
@callback
def get_music_assistant_client(
hass: HomeAssistant, config_entry_id: str
) -> MusicAssistantClient:
"""Get the Music Assistant client for the given config entry."""
entry: MusicAssistantConfigEntry | None
if not (entry := hass.config_entries.async_get_entry(config_entry_id)):
raise ServiceValidationError("Entry not found")
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError("Entry not loaded")
return entry.runtime_data.mass
@callback
def register_actions(hass: HomeAssistant) -> None:
"""Register custom actions."""
@@ -126,55 +124,6 @@ def register_actions(hass: HomeAssistant) -> None:
supports_response=SupportsResponse.ONLY,
)
# Platform entity services
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_PLAY_MEDIA_ADVANCED,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType),
vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption),
vol.Optional(ATTR_ARTIST): cv.string,
vol.Optional(ATTR_ALBUM): cv.string,
vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool),
},
func="_async_handle_play_media",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_PLAY_ANNOUNCEMENT,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Required(ATTR_URL): cv.string,
vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool),
vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int),
},
func="_async_handle_play_announcement",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_TRANSFER_QUEUE,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema={
vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id,
vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool),
},
func="_async_handle_transfer_queue",
)
service.async_register_platform_entity_service(
hass,
DOMAIN,
SERVICE_GET_QUEUE,
entity_domain=MEDIA_PLAYER_DOMAIN,
schema=None,
func="_async_handle_get_queue",
supports_response=SupportsResponse.ONLY,
)
async def handle_search(call: ServiceCall) -> ServiceResponse:
"""Handle queue_command action."""

View File

@@ -13,7 +13,7 @@ from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: NSConfigEntry) -> bool:

View File

@@ -1,120 +0,0 @@
"""Support for Nederlandse Spoorwegen public transport."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
import logging
from ns_api import Trip
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, INTEGRATION_TITLE, ROUTE_MODEL
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0 # since we use coordinator pattern
@dataclass(frozen=True, kw_only=True)
class NSBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Nederlandse Spoorwegen sensor entity."""
value_fn: Callable[[Trip], bool]
def get_delay(planned: datetime | None, actual: datetime | None) -> bool:
"""Return True if delay is present, False otherwise."""
return bool(planned and actual and planned != actual)
BINARY_SENSOR_DESCRIPTIONS = [
NSBinarySensorEntityDescription(
key="is_departure_delayed",
translation_key="is_departure_delayed",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: get_delay(
trip.departure_time_planned, trip.departure_time_actual
),
entity_registry_enabled_default=False,
),
NSBinarySensorEntityDescription(
key="is_arrival_delayed",
translation_key="is_arrival_delayed",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: get_delay(
trip.arrival_time_planned, trip.arrival_time_actual
),
entity_registry_enabled_default=False,
),
NSBinarySensorEntityDescription(
key="is_going",
translation_key="is_going",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda trip: trip.going,
entity_registry_enabled_default=False,
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: NSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the departure sensor from a config entry."""
coordinators = config_entry.runtime_data
for subentry_id, coordinator in coordinators.items():
async_add_entities(
(
NSBinarySensor(coordinator, subentry_id, description)
for description in BINARY_SENSOR_DESCRIPTIONS
),
config_subentry_id=subentry_id,
)
class NSBinarySensor(CoordinatorEntity[NSDataUpdateCoordinator], BinarySensorEntity):
"""Generic NS binary sensor based on entity description."""
_attr_has_entity_name = True
_attr_attribution = "Data provided by NS"
entity_description: NSBinarySensorEntityDescription
def __init__(
self,
coordinator: NSDataUpdateCoordinator,
subentry_id: str,
description: NSBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._subentry_id = subentry_id
self._attr_unique_id = f"{subentry_id}-{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, subentry_id)},
name=coordinator.name,
manufacturer=INTEGRATION_TITLE,
model=ROUTE_MODEL,
)
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
if not (trip := self.coordinator.data.first_trip):
return None
return self.entity_description.value_fn(trip)

View File

@@ -1,15 +0,0 @@
{
"entity": {
"binary_sensor": {
"is_arrival_delayed": {
"default": "mdi:bell-alert-outline"
},
"is_departure_delayed": {
"default": "mdi:bell-alert-outline"
},
"is_going": {
"default": "mdi:bell-cancel-outline"
}
}
}
}

View File

@@ -6,7 +6,6 @@ from datetime import datetime
import logging
from typing import Any
from ns_api import Trip
import voluptuous as vol
from homeassistant.components.sensor import (
@@ -39,33 +38,6 @@ from .const import (
)
from .coordinator import NSConfigEntry, NSDataUpdateCoordinator
def _get_departure_time(trip: Trip | None) -> datetime | None:
"""Get next departure time from trip data."""
return trip.departure_time_actual or trip.departure_time_planned if trip else None
def _get_time_str(time: datetime | None) -> str | None:
"""Get time as string."""
return time.strftime("%H:%M") if time else None
def _get_route(trip: Trip | None) -> list[str]:
"""Get the route as a list of station names from trip data."""
if not trip or not (trip_parts := trip.trip_parts):
return []
route = []
if departure := trip.departure:
route.append(departure)
route.extend(part.destination for part in trip_parts)
return route
def _get_delay(planned: datetime | None, actual: datetime | None) -> bool:
"""Return True if delay is present, False otherwise."""
return bool(planned and actual and planned != actual)
_LOGGER = logging.getLogger(__name__)
ROUTE_SCHEMA = vol.Schema(
@@ -155,7 +127,7 @@ async def async_setup_entry(
class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity):
"""Implementation of a NS Departure Sensor (legacy)."""
"""Implementation of a NS Departure Sensor."""
_attr_device_class = SensorDeviceClass.TIMESTAMP
_attr_attribution = "Data provided by NS"
@@ -191,40 +163,94 @@ class NSDepartureSensor(CoordinatorEntity[NSDataUpdateCoordinator], SensorEntity
return None
first_trip = route_data.first_trip
return _get_departure_time(first_trip)
if first_trip.departure_time_actual:
return first_trip.departure_time_actual
return first_trip.departure_time_planned
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
first_trip = self.coordinator.data.first_trip
next_trip = self.coordinator.data.next_trip
route_data = self.coordinator.data
if not route_data:
return None
first_trip = route_data.first_trip
next_trip = route_data.next_trip
if not first_trip:
return None
status = first_trip.status
route = []
if first_trip.trip_parts:
route = [first_trip.departure]
route.extend(k.destination for k in first_trip.trip_parts)
return {
# Static attributes
attributes = {
"going": first_trip.going,
"departure_time_planned": _get_time_str(first_trip.departure_time_planned),
"departure_time_actual": _get_time_str(first_trip.departure_time_actual),
"departure_delay": _get_delay(
first_trip.departure_time_planned,
first_trip.departure_time_actual,
),
"departure_time_planned": None,
"departure_time_actual": None,
"departure_delay": False,
"departure_platform_planned": first_trip.departure_platform_planned,
"departure_platform_actual": first_trip.departure_platform_actual,
"arrival_time_planned": _get_time_str(first_trip.arrival_time_planned),
"arrival_time_actual": _get_time_str(first_trip.arrival_time_actual),
"arrival_delay": _get_delay(
first_trip.arrival_time_planned,
first_trip.arrival_time_actual,
),
"arrival_time_planned": None,
"arrival_time_actual": None,
"arrival_delay": False,
"arrival_platform_planned": first_trip.arrival_platform_planned,
"arrival_platform_actual": first_trip.arrival_platform_actual,
"next": _get_time_str(_get_departure_time(next_trip)),
"status": status.lower() if status else None,
"next": None,
"status": first_trip.status.lower() if first_trip.status else None,
"transfers": first_trip.nr_transfers,
"route": _get_route(first_trip),
"route": route,
"remarks": None,
}
# Planned departure attributes
if first_trip.departure_time_planned is not None:
attributes["departure_time_planned"] = (
first_trip.departure_time_planned.strftime("%H:%M")
)
# Actual departure attributes
if first_trip.departure_time_actual is not None:
attributes["departure_time_actual"] = (
first_trip.departure_time_actual.strftime("%H:%M")
)
# Delay departure attributes
if (
attributes["departure_time_planned"]
and attributes["departure_time_actual"]
and attributes["departure_time_planned"]
!= attributes["departure_time_actual"]
):
attributes["departure_delay"] = True
# Planned arrival attributes
if first_trip.arrival_time_planned is not None:
attributes["arrival_time_planned"] = (
first_trip.arrival_time_planned.strftime("%H:%M")
)
# Actual arrival attributes
if first_trip.arrival_time_actual is not None:
attributes["arrival_time_actual"] = first_trip.arrival_time_actual.strftime(
"%H:%M"
)
# Delay arrival attributes
if (
attributes["arrival_time_planned"]
and attributes["arrival_time_actual"]
and attributes["arrival_time_planned"] != attributes["arrival_time_actual"]
):
attributes["arrival_delay"] = True
# Next trip attributes
if next_trip:
if next_trip.departure_time_actual is not None:
attributes["next"] = next_trip.departure_time_actual.strftime("%H:%M")
elif next_trip.departure_time_planned is not None:
attributes["next"] = next_trip.departure_time_planned.strftime("%H:%M")
return attributes

View File
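The sensor refactor above replaces inline attribute math with the small helpers _get_departure_time, _get_time_str, _get_route and _get_delay. A self-contained rendition of the delay and departure-time logic, with a tiny hypothetical stand-in for ns_api.Trip so it runs without the library:

from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass
class FakeTrip:
    # Hypothetical stand-in for ns_api.Trip, carrying only the fields used here.
    departure_time_planned: datetime | None
    departure_time_actual: datetime | None


def get_delay(planned: datetime | None, actual: datetime | None) -> bool:
    """Return True if a delay is present (same rule as _get_delay above)."""
    return bool(planned and actual and planned != actual)


def get_departure_time(trip: FakeTrip | None) -> datetime | None:
    """Prefer the actual departure time, fall back to the planned one."""
    return trip.departure_time_actual or trip.departure_time_planned if trip else None


planned = datetime(2025, 11, 19, 8, 15)
trip = FakeTrip(planned, planned + timedelta(minutes=4))

print(get_delay(trip.departure_time_planned, trip.departure_time_actual))  # True
print(get_departure_time(trip).strftime("%H:%M"))  # 08:19, the "%H:%M" form used in the attributes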

@@ -64,19 +64,6 @@
}
}
},
"entity": {
"binary_sensor": {
"is_arrival_delayed": {
"name": "Arrival delayed"
},
"is_departure_delayed": {
"name": "Departure delayed"
},
"is_going": {
"name": "Going"
}
}
},
"issues": {
"deprecated_yaml_import_issue_cannot_connect": {
"description": "Configuring Nederlandse Spoorwegen using YAML sensor platform is deprecated.\n\nWhile importing your configuration, Home Assistant could not connect to the NS API. Please check your internet connection and the status of the NS API, then restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI.",

View File

@@ -11,6 +11,7 @@ from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, PLATFORMS
from .coordinator import (
OhmeAdvancedSettingsCoordinator,
OhmeChargeSessionCoordinator,
OhmeConfigEntry,
OhmeDeviceInfoCoordinator,
@@ -55,6 +56,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: OhmeConfigEntry) -> bool
coordinators = (
OhmeChargeSessionCoordinator(hass, entry, client),
OhmeAdvancedSettingsCoordinator(hass, entry, client),
OhmeDeviceInfoCoordinator(hass, entry, client),
)

View File

@@ -10,7 +10,7 @@ import logging
from ohme import ApiException, OhmeApiClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
@@ -23,6 +23,7 @@ class OhmeRuntimeData:
"""Dataclass to hold ohme coordinators."""
charge_session_coordinator: OhmeChargeSessionCoordinator
advanced_settings_coordinator: OhmeAdvancedSettingsCoordinator
device_info_coordinator: OhmeDeviceInfoCoordinator
@@ -77,6 +78,31 @@ class OhmeChargeSessionCoordinator(OhmeBaseCoordinator):
await self.client.async_get_charge_session()
class OhmeAdvancedSettingsCoordinator(OhmeBaseCoordinator):
"""Coordinator to pull settings and charger state from the API."""
coordinator_name = "Advanced Settings"
def __init__(
self, hass: HomeAssistant, config_entry: OhmeConfigEntry, client: OhmeApiClient
) -> None:
"""Initialise coordinator."""
super().__init__(hass, config_entry, client)
@callback
def _dummy_listener() -> None:
pass
# This coordinator is used by the API library to determine whether the
# charger is online and available. It is therefore required even if no
# entities are using it.
self.async_add_listener(_dummy_listener)
async def _internal_update_data(self) -> None:
"""Fetch data from API endpoint."""
await self.client.async_get_advanced_settings()
class OhmeDeviceInfoCoordinator(OhmeBaseCoordinator):
"""Coordinator to pull device info and charger settings from the API."""

View File
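The dummy listener registered above exists because a DataUpdateCoordinator stops scheduling refreshes once it has no listeners, while the Ohme API client still needs the advanced-settings poll to know whether the charger is online. A rough, dependency-free model of that listener-gated refresh behaviour (not the real DataUpdateCoordinator):

import asyncio


class MiniCoordinator:
    """Toy coordinator that only refreshes while at least one listener is subscribed."""

    def __init__(self) -> None:
        self._listeners: list = []
        self.refresh_count = 0

    def async_add_listener(self, listener) -> None:
        self._listeners.append(listener)

    async def maybe_refresh(self) -> None:
        # No listeners means no work, mirroring why the dummy listener is needed.
        if not self._listeners:
            return
        self.refresh_count += 1
        for listener in self._listeners:
            listener()


async def main() -> None:
    coordinator = MiniCoordinator()
    await coordinator.maybe_refresh()
    print(coordinator.refresh_count)  # 0: nothing subscribed, nothing polled

    # The dummy-listener trick: subscribing a no-op keeps the refresh alive
    # even though no entity consumes the data.
    coordinator.async_add_listener(lambda: None)
    await coordinator.maybe_refresh()
    print(coordinator.refresh_count)  # 1


asyncio.run(main())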

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"requirements": ["ohme==1.6.0"]
"requirements": ["ohme==1.5.2"]
}

View File

@@ -37,7 +37,7 @@ class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription):
value_fn: Callable[[OhmeApiClient], str | int | float | None]
SENSORS = [
SENSOR_CHARGE_SESSION = [
OhmeSensorDescription(
key="status",
translation_key="status",
@@ -91,6 +91,18 @@ SENSORS = [
),
]
SENSOR_ADVANCED_SETTINGS = [
OhmeSensorDescription(
key="ct_current",
translation_key="ct_current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
value_fn=lambda client: client.power.ct_amps,
is_supported_fn=lambda client: client.ct_connected,
entity_registry_enabled_default=False,
),
]
async def async_setup_entry(
hass: HomeAssistant,
@@ -98,11 +110,16 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors."""
coordinator = config_entry.runtime_data.charge_session_coordinator
coordinators = config_entry.runtime_data
coordinator_map = [
(SENSOR_CHARGE_SESSION, coordinators.charge_session_coordinator),
(SENSOR_ADVANCED_SETTINGS, coordinators.advanced_settings_coordinator),
]
async_add_entities(
OhmeSensor(coordinator, description)
for description in SENSORS
for entities, coordinator in coordinator_map
for description in entities
if description.is_supported_fn(coordinator.client)
)

View File
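The reworked async_setup_entry above pairs each description list with its coordinator and filters on is_supported_fn before creating sensors. The same comprehension shape in isolation, with hypothetical stand-ins for the description and coordinator objects:

from dataclasses import dataclass
from typing import Callable


@dataclass
class Description:
    # Hypothetical stand-in for OhmeSensorDescription: a key plus a support check.
    key: str
    is_supported_fn: Callable[[dict], bool]


@dataclass
class Coordinator:
    # Hypothetical stand-in carrying the client data the support check inspects.
    client: dict


SENSOR_CHARGE_SESSION = [Description("status", lambda client: True)]
SENSOR_ADVANCED_SETTINGS = [
    Description("ct_current", lambda client: client.get("ct_connected", False)),
]

coordinator_map = [
    (SENSOR_CHARGE_SESSION, Coordinator(client={})),
    (SENSOR_ADVANCED_SETTINGS, Coordinator(client={"ct_connected": False})),
]

# Same nested comprehension as above: walk (descriptions, coordinator) pairs and
# keep only descriptions whose support check passes for that coordinator's client.
entities = [
    (description.key, coordinator)
    for descriptions, coordinator in coordinator_map
    for description in descriptions
    if description.is_supported_fn(coordinator.client)
]
print([key for key, _ in entities])  # ['status']: ct_current filtered out, CT not connected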

@@ -10,5 +10,5 @@
"iot_class": "cloud_polling",
"loggers": ["onedrive_personal_sdk"],
"quality_scale": "platinum",
"requirements": ["onedrive-personal-sdk==0.0.17"]
"requirements": ["onedrive-personal-sdk==0.0.16"]
}

View File

@@ -441,7 +441,7 @@ class SamsungTVConfigFlow(ConfigFlow, domain=DOMAIN):
def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return getattr(other_flow, "_host", None) == self._host
return other_flow._host == self._host # noqa: SLF001
@callback
def _abort_if_manufacturer_is_not_samsung(self) -> None:

View File

@@ -7,11 +7,7 @@ from collections.abc import AsyncIterator, Mapping
from contextlib import asynccontextmanager
from typing import TYPE_CHECKING, Any, Final
from aioshelly.ble import get_name_from_model_id
from aioshelly.ble.manufacturer_data import (
has_rpc_over_ble,
parse_shelly_manufacturer_data,
)
from aioshelly.ble.manufacturer_data import has_rpc_over_ble
from aioshelly.ble.provisioning import async_provision_wifi, async_scan_wifi_networks
from aioshelly.block_device import BlockDevice
from aioshelly.common import ConnectionOptions, get_info
@@ -362,35 +358,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
self, discovery_info: BluetoothServiceInfoBleak
) -> ConfigFlowResult:
"""Handle bluetooth discovery."""
# Try to parse MAC address from the Bluetooth device name
# If not found, try to get it from manufacturer data
device_name = discovery_info.name
if (
not (mac := mac_address_from_name(device_name))
and (
parsed := parse_shelly_manufacturer_data(
discovery_info.manufacturer_data
)
)
and (mac_with_colons := parsed.get("mac"))
and isinstance(mac_with_colons, str)
):
# parse_shelly_manufacturer_data returns MAC with colons (e.g., "CC:BA:97:C2:D6:72")
# Convert to format without colons to match mac_address_from_name output
mac = mac_with_colons.replace(":", "")
# For devices without a Shelly name, use model name from model ID if available
# Gen3/4 devices advertise MAC address as name instead of "ShellyXXX-MACADDR"
if (
(model_id := parsed.get("model_id"))
and isinstance(model_id, int)
and (model_name := get_name_from_model_id(model_id))
):
# Remove spaces from model name (e.g., "Shelly 1 Mini Gen4" -> "Shelly1MiniGen4")
device_name = f"{model_name.replace(' ', '')}-{mac}"
else:
device_name = f"Shelly-{mac}"
if not mac:
# Parse MAC address from the Bluetooth device name
if not (mac := mac_address_from_name(discovery_info.name)):
return self.async_abort(reason="invalid_discovery_info")
# Check if RPC-over-BLE is enabled - required for WiFi provisioning
@@ -412,10 +381,10 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN):
if not self.ble_device:
return self.async_abort(reason="cannot_connect")
self.device_name = device_name
self.device_name = discovery_info.name
self.context.update(
{
"title_placeholders": {"name": device_name},
"title_placeholders": {"name": discovery_info.name},
}
)

View File
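The Bluetooth discovery change above falls back to the Shelly manufacturer data when the MAC cannot be read from the advertised name, then rebuilds a display name for Gen3/4 devices that only advertise their MAC. The string handling reduces to two operations, sketched here with hypothetical values standing in for aioshelly's parse_shelly_manufacturer_data and get_name_from_model_id results:

# Hypothetical parsed manufacturer data, shaped like the dict the flow reads.
parsed = {"mac": "CC:BA:97:C2:D6:72", "model_id": 0x1234}
model_name = "Shelly 1 Mini Gen4"  # assumed result of get_name_from_model_id(parsed["model_id"])

# The manufacturer data carries the MAC with colons; strip them to match the
# format mac_address_from_name() produces.
mac = parsed["mac"].replace(":", "")
print(mac)  # CCBA97C2D672

# Rebuild a friendly name from the model: spaces removed, MAC appended.
device_name = f"{model_name.replace(' ', '')}-{mac}"
print(device_name)  # Shelly1MiniGen4-CCBA97C2D672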

@@ -32,16 +32,11 @@ from .utils import (
async_remove_shelly_entity,
get_block_channel,
get_block_custom_name,
get_block_number_of_channels,
get_device_entry_gen,
get_rpc_custom_name,
get_rpc_component_name,
get_rpc_entity_name,
get_rpc_key,
get_rpc_key_id,
get_rpc_key_instances,
get_rpc_number_of_channels,
is_block_momentary_input,
is_block_single_device,
is_rpc_momentary_input,
)
@@ -163,7 +158,8 @@ def _async_setup_rpc_entry(
if script_name == BLE_SCRIPT_NAME:
continue
if script_events and (event_types := script_events[get_rpc_key_id(script)]):
script_id = int(script.split(":")[-1])
if script_events and (event_types := script_events[script_id]):
entities.append(ShellyRpcScriptEvent(coordinator, script, event_types))
# If a script is removed from the device configuration, we need to remove orphaned entities
@@ -201,15 +197,13 @@ class ShellyBlockEvent(ShellyBlockEntity, EventEntity):
self._attr_event_types = list(BASIC_INPUTS_EVENTS_TYPES)
self.entity_description = description
if hasattr(self, "_attr_name") and not (
(single := is_block_single_device(coordinator.device, block))
and get_block_custom_name(coordinator.device, block)
if (
hasattr(self, "_attr_name")
and self._attr_name
and not get_block_custom_name(coordinator.device, block)
):
self._attr_translation_placeholders = {
"input_number": get_block_channel(block)
if single
and get_block_number_of_channels(coordinator.device, block) > 1
else ""
}
delattr(self, "_attr_name")
@@ -243,24 +237,22 @@ class ShellyRpcEvent(CoordinatorEntity[ShellyRpcCoordinator], EventEntity):
) -> None:
"""Initialize Shelly entity."""
super().__init__(coordinator)
self.event_id = int(key.split(":")[-1])
self._attr_device_info = get_entity_rpc_device_info(coordinator, key)
self._attr_unique_id = f"{coordinator.mac}-{key}"
self.entity_description = description
if description.key == "input":
_, component, component_id = get_rpc_key(key)
if not get_rpc_custom_name(coordinator.device, key):
self._attr_translation_placeholders = {
"input_number": component_id
if get_rpc_number_of_channels(coordinator.device, component) > 1
else ""
}
component = key.split(":")[0]
component_id = key.split(":")[-1]
if not get_rpc_component_name(coordinator.device, key) and (
component.lower() == "input" and component_id.isnumeric()
):
self._attr_translation_placeholders = {"input_number": component_id}
else:
self._attr_name = get_rpc_entity_name(coordinator.device, key)
self.event_id = int(component_id)
elif description.key == "script":
self._attr_name = get_rpc_entity_name(coordinator.device, key)
self.event_id = get_rpc_key_id(key)
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""

View File

@@ -4,9 +4,6 @@
"bluetooth": [
{
"local_name": "Shelly*"
},
{
"manufacturer_id": 2985
}
],
"codeowners": ["@bieniu", "@thecode", "@chemelli74", "@bdraco"],
@@ -16,8 +13,8 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aioshelly"],
"quality_scale": "platinum",
"requirements": ["aioshelly==13.20.0"],
"quality_scale": "silver",
"requirements": ["aioshelly==13.17.0"],
"zeroconf": [
{
"name": "shelly*",

View File

@@ -55,7 +55,7 @@ rules:
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
entity-translations: todo
exception-translations: done
icon-translations: done
reconfiguration-flow: done

View File

@@ -1919,23 +1919,8 @@ class RpcSleepingSensor(ShellySleepingRpcAttributeEntity, RestoreSensor):
super().__init__(coordinator, key, attribute, description, entry)
self.restored_data: SensorExtraStoredData | None = None
if coordinator.device.initialized:
if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")
translation_placeholders, translation_key = (
get_entity_translation_attributes(
get_rpc_channel_name(coordinator.device, key),
description.translation_key,
description.device_class,
self._default_to_device_class_name(),
)
)
if translation_placeholders:
self._attr_translation_placeholders = translation_placeholders
if translation_key:
self._attr_translation_key = translation_key
if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""

View File

@@ -93,8 +93,8 @@ def async_remove_shelly_entity(
entity_reg.async_remove(entity_id)
def get_block_number_of_channels(device: BlockDevice, block: Block) -> int:
"""Get number of channels."""
def get_number_of_channels(device: BlockDevice, block: Block) -> int:
"""Get number of channels for block type."""
channels = None
if block.type == "input":
@@ -154,7 +154,7 @@ def get_block_channel_name(device: BlockDevice, block: Block | None) -> str | No
if (
not block
or block.type in ("device", "light", "relay", "emeter")
or get_block_number_of_channels(device, block) == 1
or get_number_of_channels(device, block) == 1
):
return None
@@ -253,7 +253,7 @@ def get_block_input_triggers(
if not is_block_momentary_input(device.settings, block, True):
return []
if block.type == "device" or get_block_number_of_channels(device, block) == 1:
if block.type == "device" or get_number_of_channels(device, block) == 1:
subtype = "button"
else:
assert block.channel
@@ -397,13 +397,8 @@ def get_rpc_key(value: str) -> tuple[bool, str, str]:
return len(parts) > 1, parts[0], parts[-1]
def get_rpc_key_id(value: str) -> int:
"""Get id from device key."""
return int(get_rpc_key(value)[-1])
def get_rpc_custom_name(device: RpcDevice, key: str) -> str | None:
"""Get custom name from device config."""
"""Get component name from device config."""
if (
key in device.config
and key != "em:0" # workaround for Pro 3EM, we don't want to get name for em:0
@@ -414,9 +409,9 @@ def get_rpc_custom_name(device: RpcDevice, key: str) -> str | None:
return None
def get_rpc_number_of_channels(device: RpcDevice, component: str) -> int:
"""Get number of channels."""
return len(get_rpc_key_instances(device.status, component, all_lights=True))
def get_rpc_component_name(device: RpcDevice, key: str) -> str | None:
"""Get component name from device config."""
return get_rpc_custom_name(device, key)
def get_rpc_channel_name(device: RpcDevice, key: str) -> str | None:
@@ -424,15 +419,17 @@ def get_rpc_channel_name(device: RpcDevice, key: str) -> str | None:
if BLU_TRV_IDENTIFIER in key:
return None
_, component, component_id = get_rpc_key(key)
instances = len(
get_rpc_key_instances(device.status, key.split(":")[0], all_lights=True)
)
component = key.split(":")[0]
component_id = key.split(":")[-1]
if custom_name := get_rpc_custom_name(device, key):
if component in (*VIRTUAL_COMPONENTS, "input", "presencezone", "script"):
return custom_name
return (
custom_name if get_rpc_number_of_channels(device, component) == 1 else None
)
return custom_name if instances == 1 else None
if component in (*VIRTUAL_COMPONENTS, "input"):
return f"{component.title()} {component_id}"
@@ -440,14 +437,6 @@ def get_rpc_channel_name(device: RpcDevice, key: str) -> str | None:
return None
def get_rpc_key_normalized(key: str) -> str:
"""Get normalized key. Workaround for Pro EM50 and Pro 3EM."""
# workaround for Pro EM50
key = key.replace("em1data", "em1")
# workaround for Pro 3EM
return key.replace("emdata", "em")
def get_rpc_sub_device_name(
device: RpcDevice, key: str, emeter_phase: str | None = None
) -> str:
@@ -462,7 +451,11 @@ def get_rpc_sub_device_name(
if entity_name := device.config[key].get("name"):
return cast(str, entity_name)
_, component, component_id = get_rpc_key(get_rpc_key_normalized(key))
key = key.replace("emdata", "em")
key = key.replace("em1data", "em1")
component = key.split(":")[0]
component_id = key.split(":")[-1]
if component in ("cct", "rgb", "rgbw"):
return f"{device.name} {component.upper()} light {component_id}"
@@ -531,7 +524,7 @@ def get_rpc_key_instances(
def get_rpc_key_ids(keys_dict: dict[str, Any], key: str) -> list[int]:
"""Return list of key ids for RPC device from a dict."""
return [get_rpc_key_id(k) for k in keys_dict if k.startswith(f"{key}:")]
return [int(k.split(":")[1]) for k in keys_dict if k.startswith(f"{key}:")]
def get_rpc_key_by_role(keys_dict: dict[str, Any], role: str) -> str | None:
@@ -813,10 +806,11 @@ def is_rpc_exclude_from_relay(
settings: dict[str, Any], status: dict[str, Any], channel: str
) -> bool:
"""Return true if rpc channel should be excludeed from switch platform."""
ch = int(channel.split(":")[1])
if is_rpc_thermostat_internal_actuator(status):
return True
return is_rpc_channel_type_light(settings, get_rpc_key_id(channel))
return is_rpc_channel_type_light(settings, ch)
def get_shelly_air_lamp_life(lamp_seconds: int) -> float:
@@ -838,7 +832,7 @@ async def get_rpc_scripts_event_types(
if script_name in ignore_scripts:
continue
script_id = get_rpc_key_id(script)
script_id = int(script.split(":")[-1])
script_events[script_id] = await get_rpc_script_event_types(device, script_id)
return script_events
@@ -869,8 +863,14 @@ def get_rpc_device_info(
if key is None:
return DeviceInfo(connections={(CONNECTION_NETWORK_MAC, mac)})
key = get_rpc_key_normalized(key)
has_id, component, _ = get_rpc_key(key)
# workaround for Pro EM50
key = key.replace("em1data", "em1")
# workaround for Pro 3EM
key = key.replace("emdata", "em")
key_parts = key.split(":")
component = key_parts[0]
idx = key_parts[1] if len(key_parts) > 1 else None
if emeter_phase is not None:
return DeviceInfo(
@@ -889,8 +889,8 @@ def get_rpc_device_info(
component not in (*All_LIGHT_TYPES, "cover", "em1", "switch")
and get_irrigation_zone_id(device, key) is None
)
or not has_id
or get_rpc_number_of_channels(device, component) < 2
or idx is None
or len(get_rpc_key_instances(device.status, component, all_lights=True)) < 2
):
return DeviceInfo(connections={(CONNECTION_NETWORK_MAC, mac)})
@@ -923,15 +923,6 @@ def get_blu_trv_device_info(
)
def is_block_single_device(device: BlockDevice, block: Block | None = None) -> bool:
"""Return true if block is single device."""
return (
block is None
or block.type not in ("light", "relay", "emeter")
or device.settings.get("mode") == "roller"
)
def get_block_device_info(
device: BlockDevice,
mac: str,
@@ -942,14 +933,14 @@ def get_block_device_info(
suggested_area: str | None = None,
) -> DeviceInfo:
"""Return device info for Block device."""
if is_block_single_device(device, block) or (
block is not None and get_block_number_of_channels(device, block) < 2
if (
block is None
or block.type not in ("light", "relay", "emeter")
or device.settings.get("mode") == "roller"
or get_number_of_channels(device, block) < 2
):
return DeviceInfo(connections={(CONNECTION_NETWORK_MAC, mac)})
if TYPE_CHECKING:
assert block
return DeviceInfo(
identifiers={(DOMAIN, f"{mac}-{block.description}")},
name=get_block_sub_device_name(device, block),

View File
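Most of the utils.py churn above revolves around RPC keys of the form "component:id". A standalone copy of get_rpc_key, get_rpc_key_id and the Pro EM50 / Pro 3EM normalisation workaround from the diff, exercised on a few sample keys:

def get_rpc_key(value: str) -> tuple[bool, str, str]:
    """Split an RPC key into (has_id, component, component_id)."""
    parts = value.split(":")
    return len(parts) > 1, parts[0], parts[-1]


def get_rpc_key_id(value: str) -> int:
    """Get the numeric id from a device key."""
    return int(get_rpc_key(value)[-1])


def get_rpc_key_normalized(key: str) -> str:
    """Normalise keys: Pro EM50 reports em1data, Pro 3EM reports emdata."""
    key = key.replace("em1data", "em1")
    return key.replace("emdata", "em")


print(get_rpc_key("switch:0"))               # (True, 'switch', '0')
print(get_rpc_key("sys"))                    # (False, 'sys', 'sys')
print(get_rpc_key_id("script:2"))            # 2
print(get_rpc_key_normalized("em1data:1"))   # em1:1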

@@ -66,6 +66,8 @@ class SleepAsAndroidSensorEntity(SleepAsAndroidEntity, RestoreSensor):
if webhook_id == self.webhook_id and data[ATTR_EVENT] in (
"alarm_snooze_clicked",
"alarm_snooze_canceled",
"alarm_alert_start",
"alarm_alert_dismiss",
"alarm_skip_next",
"show_skip_next_alarm",
"alarm_rescheduled",

View File

@@ -8,17 +8,20 @@ from dataclasses import dataclass
from pysmartthings import Attribute, Capability, Category, SmartThings, Status
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FullDevice, SmartThingsConfigEntry
from .const import INVALID_SWITCH_CATEGORIES, MAIN
from .entity import SmartThingsEntity
from .util import deprecate_entity
@dataclass(frozen=True, kw_only=True)
@@ -28,14 +31,11 @@ class SmartThingsBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_key: str
category_device_class: dict[Category | str, BinarySensorDeviceClass] | None = None
category: set[Category] | None = None
exists_fn: (
Callable[
[str, dict[str, dict[Capability | str, dict[Attribute | str, Status]]]],
bool,
]
| None
) = None
exists_fn: Callable[[str], bool] | None = None
component_translation_key: dict[str, str] | None = None
deprecated_fn: Callable[
[dict[str, dict[Capability | str, dict[Attribute | str, Status]]]], str | None
] = lambda _: None
CAPABILITY_TO_SENSORS: dict[
@@ -59,16 +59,17 @@ CAPABILITY_TO_SENSORS: dict[
Category.DOOR: BinarySensorDeviceClass.DOOR,
Category.WINDOW: BinarySensorDeviceClass.WINDOW,
},
exists_fn=lambda component, status: (
not ("freezer" in status and "cooler" in status)
if component == MAIN
else True
),
exists_fn=lambda key: key in {"freezer", "cooler", "cvroom"},
component_translation_key={
"freezer": "freezer_door",
"cooler": "cooler_door",
"cvroom": "cool_select_plus_door",
},
deprecated_fn=(
lambda status: "fridge_door"
if "freezer" in status and "cooler" in status
else None
),
)
},
Capability.CUSTOM_DRYER_WRINKLE_PREVENT: {
@@ -154,6 +155,15 @@ CAPABILITY_TO_SENSORS: dict[
entity_category=EntityCategory.DIAGNOSTIC,
)
},
Capability.VALVE: {
Attribute.VALVE: SmartThingsBinarySensorEntityDescription(
key=Attribute.VALVE,
translation_key="valve",
device_class=BinarySensorDeviceClass.OPENING,
is_on_key="open",
deprecated_fn=lambda _: "valve",
)
},
Capability.WATER_SENSOR: {
Attribute.WATER: SmartThingsBinarySensorEntityDescription(
key=Attribute.WATER,
@@ -194,39 +204,64 @@ async def async_setup_entry(
) -> None:
"""Add binary sensors for a config entry."""
entry_data = entry.runtime_data
entities = []
async_add_entities(
SmartThingsBinarySensor(
entry_data.client,
device,
description,
capability,
attribute,
component,
)
for device in entry_data.devices.values()
for capability, attribute_map in CAPABILITY_TO_SENSORS.items()
for attribute, description in attribute_map.items()
for component in device.status
if (
capability in device.status[component]
and (
component == MAIN
or (
description.component_translation_key is not None
and component in description.component_translation_key
)
)
and (
description.exists_fn is None
or description.exists_fn(component, device.status)
)
and (
not description.category
or get_main_component_category(device) in description.category
)
)
)
entity_registry = er.async_get(hass)
for device in entry_data.devices.values(): # pylint: disable=too-many-nested-blocks
for capability, attribute_map in CAPABILITY_TO_SENSORS.items():
for attribute, description in attribute_map.items():
for component in device.status:
if (
capability in device.status[component]
and (
component == MAIN
or (
description.exists_fn is not None
and description.exists_fn(component)
)
)
and (
not description.category
or get_main_component_category(device)
in description.category
)
):
if (
component == MAIN
and (issue := description.deprecated_fn(device.status))
is not None
):
if deprecate_entity(
hass,
entity_registry,
BINARY_SENSOR_DOMAIN,
f"{device.device.device_id}_{component}_{capability}_{attribute}_{attribute}",
f"deprecated_binary_{issue}",
):
entities.append(
SmartThingsBinarySensor(
entry_data.client,
device,
description,
capability,
attribute,
component,
)
)
continue
entities.append(
SmartThingsBinarySensor(
entry_data.client,
device,
description,
capability,
attribute,
component,
)
)
async_add_entities(entities)
class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity):

View File
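The contactSensor description above uses the two-argument exists_fn together with deprecated_fn to skip the combined main-door entity (and flag it for deprecation) once separate freezer and cooler components exist. Their behaviour on a hypothetical refrigerator status dict, with MAIN assumed to be the string "main" as in the integration's constants:

from __future__ import annotations

MAIN = "main"  # assumption: mirrors the integration's MAIN constant

# Shaped like device.status in the diff: component -> capability data.
status = {MAIN: {}, "freezer": {}, "cooler": {}}


def exists_fn(component: str, status: dict) -> bool:
    """Two-argument exists_fn from the contactSensor description above."""
    if component == MAIN:
        return not ("freezer" in status and "cooler" in status)
    return True


def deprecated_fn(status: dict) -> str | None:
    """Flag the combined fridge_door entity once per-compartment doors exist."""
    return "fridge_door" if "freezer" in status and "cooler" in status else None


print(exists_fn(MAIN, status))       # False: no combined main-door entity is created
print(exists_fn("freezer", status))  # True: per-compartment entity is created
print(deprecated_fn(status))         # fridge_door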

@@ -82,14 +82,8 @@
"stop": "mdi:stop"
}
},
"soil_level": {
"default": "mdi:liquid-spot"
},
"spin_level": {
"default": "mdi:rotate-right"
},
"water_temperature": {
"default": "mdi:water-thermometer"
}
},
"sensor": {

View File

@@ -56,34 +56,6 @@ WASHER_SPIN_LEVEL_TO_HA = {
"1600": "1600",
}
WASHER_WATER_TEMPERATURE_TO_HA = {
"none": "none",
"20": "20",
"30": "30",
"40": "40",
"50": "50",
"60": "60",
"65": "65",
"70": "70",
"75": "75",
"80": "80",
"90": "90",
"95": "95",
"tapCold": "tap_cold",
"cold": "cold",
"cool": "cool",
"ecoWarm": "eco_warm",
"warm": "warm",
"semiHot": "semi_hot",
"hot": "hot",
"extraHot": "extra_hot",
"extraLow": "extra_low",
"low": "low",
"mediumLow": "medium_low",
"medium": "medium",
"high": "high",
}
@dataclass(frozen=True, kw_only=True)
class SmartThingsSelectDescription(SelectEntityDescription):
@@ -175,16 +147,6 @@ CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = {
options_map=WASHER_SOIL_LEVEL_TO_HA,
entity_category=EntityCategory.CONFIG,
),
Capability.CUSTOM_WASHER_WATER_TEMPERATURE: SmartThingsSelectDescription(
key=Capability.CUSTOM_WASHER_WATER_TEMPERATURE,
translation_key="water_temperature",
requires_remote_control_status=True,
options_attribute=Attribute.SUPPORTED_WASHER_WATER_TEMPERATURE,
status_attribute=Attribute.WASHER_WATER_TEMPERATURE,
command=Command.SET_WASHER_WATER_TEMPERATURE,
options_map=WASHER_WATER_TEMPERATURE_TO_HA,
entity_category=EntityCategory.CONFIG,
),
}

Some files were not shown because too many files have changed in this diff.