Mirror of https://github.com/home-assistant/core.git (synced 2025-10-02 00:09:26 +00:00)

Compare commits: select-sel ... progress-a
69 commits:

04860f666f
8d009184a1
7cf29ef136
904d7e5d5a
dbc4a65d48
b93f4aabf1
9eaa40c7a4
b308a882fb
7f63ba2087
d7269cfcc6
2850a574f6
dcb8d4f702
aeadc0c4b0
683c6b17be
69dd5c91b7
5cf7dfca8f
62a49d4244
93ee6322f2
914990b58a
f78bb5adb6
905f5e7289
ec503618c3
7a41cbc314
c58ba734e7
68f63be62f
2aa4ca1351
fbabb27787
0960d78eb5
474b40511f
18b80aced3
b964d362b7
3914e41f3c
82bdfcb99b
976cea600f
8c8713c3f7
2359ae6ce7
b570fd35c8
9d94e6b3b4
cfab789823
81917425dc
bfb62709d4
ca3f2ee782
fc8703a40f
80517c7ac1
2b4b46eaf8
40b9dae608
5975cd6e09
258c9ff52b
89c5d498a4
76cb4d123a
f0c29c7699
aa4151ced7
0a6fa978fa
dc02002b9d
f071a3f38b
b935231e47
b9f7613567
1289a031ab
289546ef6d
aacff4db5d
f833b56122
7eb0f2993f
abb341abfe
0d90614369
ec84bebeea
9176867d6b
281a137ff5
d6543480ac
ae6391b866
10  .github/workflows/builder.yml (vendored)
@@ -190,7 +190,7 @@ jobs:
        echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
          fi

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:

      - name: Login to DockerHub
        if: matrix.registry == 'docker.io/homeassistant'
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        if: matrix.registry == 'ghcr.io/home-assistant'
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Login to GitHub Container Registry
-       uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+       uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
2  .github/workflows/ci.yaml (vendored)
@@ -711,7 +711,7 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-       uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
+       uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
        with:
          license-check: false # We use our own license audit checks
4  .github/workflows/codeql.yml (vendored)
@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-       uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
+       uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
        with:
          languages: python

      - name: Perform CodeQL Analysis
-       uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
+       uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
        with:
          category: "/language:python"
@@ -22,6 +22,17 @@ class OAuth2FlowHandler(
    VERSION = CONFIG_FLOW_VERSION
    MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION

+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Check we have the cloud integration set up."""
+        if "cloud" not in self.hass.config.components:
+            return self.async_abort(
+                reason="cloud_not_enabled",
+                description_placeholders={"default_config": "default_config"},
+            )
+        return await super().async_step_user(user_input)
+
    async def async_step_reauth(
        self, user_input: Mapping[str, Any]
    ) -> ConfigFlowResult:
@@ -24,7 +24,8 @@
      "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
      "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
+      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
+      "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
    },
    "create_entry": {
      "default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["aioamazondevices"],
  "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==6.2.6"]
+  "requirements": ["aioamazondevices==6.2.7"]
}
@@ -551,7 +551,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
        for domain, integration_info in integration_inputs.items()
        if (integration := integrations.get(domain)) is not None
        and integration.is_built_in
-        and integration.integration_type in ("device", "hub")
+        and integration.manifest.get("integration_type") in ("device", "hub")
    }

    # Call integrations that implement the analytics platform
@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
+from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter

from . import util
from .agent import BackupAgent
@@ -144,7 +145,7 @@ class DownloadBackupView(HomeAssistantView):
            return Response(status=HTTPStatus.NOT_FOUND)
        else:
            stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
+            reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))

        worker_done_event = asyncio.Event()
@@ -152,7 +153,7 @@ class DownloadBackupView(HomeAssistantView):
            """Call by the worker thread when it's done."""
            hass.loop.call_soon_threadsafe(worker_done_event.set)

-        stream = util.AsyncIteratorWriter(hass)
+        stream = AsyncIteratorWriter(hass.loop)
        worker = threading.Thread(
            target=util.decrypt_backup,
            args=[backup, reader, stream, password, on_done, 0, []],
@@ -38,6 +38,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
+from homeassistant.util.async_iterator import AsyncIteratorReader

from . import util as backup_util
from .agent import (
@@ -72,7 +73,6 @@ from .models import (
)
from .store import BackupStore
from .util import (
-    AsyncIteratorReader,
    DecryptedBackupStreamer,
    EncryptedBackupStreamer,
    make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
            reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
        else:
            backup_stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
+            reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
        try:
            await self.hass.async_add_executor_job(
                validate_password_stream, reader, password
@@ -4,7 +4,6 @@ from __future__ import annotations

import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
-from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -14,7 +13,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
import threading
-from typing import IO, Any, Self, cast
+from typing import IO, Any, cast

import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -23,6 +22,11 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
+from homeassistant.util.async_iterator import (
+    Abort,
+    AsyncIteratorReader,
+    AsyncIteratorWriter,
+)
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
@@ -59,12 +63,6 @@ class BackupEmpty(DecryptError):
    _message = "No tar files found in the backup."


-class AbortCipher(HomeAssistantError):
-    """Abort the cipher operation."""
-
-    _message = "Abort cipher operation."
-
-
def make_backup_dir(path: Path) -> None:
    """Create a backup directory if it does not exist."""
    path.mkdir(exist_ok=True)
@@ -166,106 +164,6 @@ def validate_password(path: Path, password: str | None) -> bool:
    return False


-class AsyncIteratorReader:
-    """Wrap an AsyncIterator."""
-
-    def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
-        """Initialize the wrapper."""
-        self._aborted = False
-        self._hass = hass
-        self._stream = stream
-        self._buffer: bytes | None = None
-        self._next_future: Future[bytes | None] | None = None
-        self._pos: int = 0
-
-    async def _next(self) -> bytes | None:
-        """Get the next chunk from the iterator."""
-        return await anext(self._stream, None)
-
-    def abort(self) -> None:
-        """Abort the reader."""
-        self._aborted = True
-        if self._next_future is not None:
-            self._next_future.cancel()
-
-    def read(self, n: int = -1, /) -> bytes:
-        """Read data from the iterator."""
-        result = bytearray()
-        while n < 0 or len(result) < n:
-            if not self._buffer:
-                self._next_future = asyncio.run_coroutine_threadsafe(
-                    self._next(), self._hass.loop
-                )
-                if self._aborted:
-                    self._next_future.cancel()
-                    raise AbortCipher
-                try:
-                    self._buffer = self._next_future.result()
-                except CancelledError as err:
-                    raise AbortCipher from err
-                self._pos = 0
-            if not self._buffer:
-                # The stream is exhausted
-                break
-            chunk = self._buffer[self._pos : self._pos + n]
-            result.extend(chunk)
-            n -= len(chunk)
-            self._pos += len(chunk)
-            if self._pos == len(self._buffer):
-                self._buffer = None
-        return bytes(result)
-
-    def close(self) -> None:
-        """Close the iterator."""
-
-
-class AsyncIteratorWriter:
-    """Wrap an AsyncIterator."""
-
-    def __init__(self, hass: HomeAssistant) -> None:
-        """Initialize the wrapper."""
-        self._aborted = False
-        self._hass = hass
-        self._pos: int = 0
-        self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
-        self._write_future: Future[bytes | None] | None = None
-
-    def __aiter__(self) -> Self:
-        """Return the iterator."""
-        return self
-
-    async def __anext__(self) -> bytes:
-        """Get the next chunk from the iterator."""
-        if data := await self._queue.get():
-            return data
-        raise StopAsyncIteration
-
-    def abort(self) -> None:
-        """Abort the writer."""
-        self._aborted = True
-        if self._write_future is not None:
-            self._write_future.cancel()
-
-    def tell(self) -> int:
-        """Return the current position in the iterator."""
-        return self._pos
-
-    def write(self, s: bytes, /) -> int:
-        """Write data to the iterator."""
-        self._write_future = asyncio.run_coroutine_threadsafe(
-            self._queue.put(s), self._hass.loop
-        )
-        if self._aborted:
-            self._write_future.cancel()
-            raise AbortCipher
-        try:
-            self._write_future.result()
-        except CancelledError as err:
-            raise AbortCipher from err
-        self._pos += len(s)
-        return len(s)
-
-
def validate_password_stream(
    input_stream: IO[bytes],
    password: str | None,
@@ -342,7 +240,7 @@ def decrypt_backup(
        finally:
            # Write an empty chunk to signal the end of the stream
            output_stream.write(b"")
-    except AbortCipher:
+    except Abort:
        LOGGER.debug("Cipher operation aborted")
    finally:
        on_done(error)
@@ -430,7 +328,7 @@ def encrypt_backup(
        finally:
            # Write an empty chunk to signal the end of the stream
            output_stream.write(b"")
-    except AbortCipher:
+    except Abort:
        LOGGER.debug("Cipher operation aborted")
    finally:
        on_done(error)
@@ -557,8 +455,8 @@ class _CipherBackupStreamer:
            self._hass.loop.call_soon_threadsafe(worker_status.done.set)

        stream = await self._open_stream()
-        reader = AsyncIteratorReader(self._hass, stream)
-        writer = AsyncIteratorWriter(self._hass)
+        reader = AsyncIteratorReader(self._hass.loop, stream)
+        writer = AsyncIteratorWriter(self._hass.loop)
        worker = threading.Thread(
            target=self._cipher_func,
            args=[
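The net effect of the backup hunks above is that the blocking reader/writer bridges now live in homeassistant.util.async_iterator and are constructed with an event loop instead of the hass object. A minimal sketch of the resulting call pattern, mirroring the wiring visible in the http.py and util.py hunks (the `agent`, `backup`, `on_done` names are stand-ins taken from the surrounding component code):

```python
# Sketch only: bridges an async download stream into the blocking decrypt worker,
# following the calls shown in the diff above.
import threading
from typing import IO, cast

from homeassistant.components.backup.util import decrypt_backup
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter


async def stream_decrypted_backup(hass, agent, backup, backup_id, password, on_done):
    """Return an async iterator yielding the decrypted backup bytes."""
    stream = await agent.async_download_backup(backup_id)
    # Both helpers now take the running event loop rather than hass.
    reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))
    writer = AsyncIteratorWriter(hass.loop)
    threading.Thread(
        target=decrypt_backup,
        args=[backup, reader, writer, password, on_done, 0, []],
    ).start()
    return writer  # consumed as an async iterator, e.g. by the download view
```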
@@ -73,11 +73,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
    # Add the websocket and API client
    entry.runtime_data = BangOlufsenData(websocket, client)

-    # Start WebSocket connection
-    await client.connect_notifications(remote_control=True, reconnect=True)
-
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

+    # Start WebSocket connection once the platforms have been loaded.
+    # This ensures that the initial WebSocket notifications are dispatched to entities
+    await client.connect_notifications(remote_control=True, reconnect=True)
+
    return True
@@ -125,7 +125,8 @@ async def async_setup_entry(
    async_add_entities(
        new_entities=[
            BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
-        ]
+        ],
+        update_before_add=True,
    )

    # Register actions.
@@ -266,34 +267,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
            self._software_status.software_version,
        )

-        # Get overall device state once. This is handled by WebSocket events the rest of the time.
-        product_state = await self._client.get_product_state()
-
-        # Get volume information.
-        if product_state.volume:
-            self._volume = product_state.volume
-
-        # Get all playback information.
-        # Ensure that the metadata is not None upon startup
-        if product_state.playback:
-            if product_state.playback.metadata:
-                self._playback_metadata = product_state.playback.metadata
-                self._remote_leader = product_state.playback.metadata.remote_leader
-            if product_state.playback.progress:
-                self._playback_progress = product_state.playback.progress
-            if product_state.playback.source:
-                self._source_change = product_state.playback.source
-            if product_state.playback.state:
-                self._playback_state = product_state.playback.state
-                # Set initial state
-                if self._playback_state.value:
-                    self._state = self._playback_state.value
-
-        self._attr_media_position_updated_at = utcnow()
-
-        # Get the highest resolution available of the given images.
-        self._media_image = get_highest_resolution_artwork(self._playback_metadata)
-
        # If the device has been updated with new sources, then the API will fail here.
        await self._async_update_sources()
@@ -315,9 +315,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    hass.http.register_view(CalendarListView(component))
    hass.http.register_view(CalendarEventView(component))

-    frontend.async_register_built_in_panel(
-        hass, "calendar", "calendar", "hass:calendar"
-    )
+    frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")

    websocket_api.async_register_command(hass, handle_calendar_event_create)
    websocket_api.async_register_command(hass, handle_calendar_event_delete)
@@ -53,7 +53,6 @@ from .const import (
-    CONF_ACME_SERVER,
    CONF_ALEXA,
    CONF_ALIASES,
    CONF_CLOUDHOOK_SERVER,
    CONF_COGNITO_CLIENT_ID,
    CONF_ENTITY_CONFIG,
    CONF_FILTER,
@@ -130,7 +129,6 @@ CONFIG_SCHEMA = vol.Schema(
                vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
                vol.Optional(CONF_ACCOUNTS_SERVER): str,
-                vol.Optional(CONF_ACME_SERVER): str,
                vol.Optional(CONF_CLOUDHOOK_SERVER): str,
                vol.Optional(CONF_RELAYER_SERVER): str,
                vol.Optional(CONF_REMOTESTATE_SERVER): str,
                vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,
@@ -78,7 +78,6 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
-CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.1.2"],
+  "requirements": ["hass-nabucasa==1.2.0"],
  "single_config_entry": true
}
106  homeassistant/components/co2signal/quality_scale.yaml (new file)
@@ -0,0 +1,106 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      The integration does not provide any actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage:
    status: todo
    comment: |
      Stale docstring and test name: `test_form_home` and reusing result.
      Extract `async_setup_entry` into own fixture.
      Avoid importing `config_flow` in tests.
      Test reauth with errors
  config-flow:
    status: todo
    comment: |
      The config flow misses data descriptions.
      Remove URLs from data descriptions, they should be replaced with placeholders.
      Make use of Electricity Maps zone keys in country code as dropdown.
      Make use of location selector for coordinates.
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      The integration does not provide any actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: todo

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      The integration does not provide any actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      The integration does not provide any additional options.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: todo
  reauthentication-flow: done
  test-coverage:
    status: todo
    comment: |
      Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})`
      `test_sensor` could use `snapshot_platform`

  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: |
      This integration cannot be discovered, it is a connecting to a cloud service.
  discovery:
    status: exempt
    comment: |
      This integration cannot be discovered, it is a connecting to a cloud service.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      The integration connects to a single service per configuration entry.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      This integration does not raise any repairable issues.
  stale-devices:
    status: exempt
    comment: |
      This integration connect to a single device per configuration entry.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the config component."""
    frontend.async_register_built_in_panel(
-        hass, "config", "config", "hass:cog", require_admin=True
+        hass, "config", "config", "mdi:cog", require_admin=True
    )

    for panel in SECTIONS:
@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Callable
from http import HTTPStatus
+import logging
from typing import Any, NoReturn

from aiohttp import web
@@ -23,7 +24,12 @@ from homeassistant.helpers.data_entry_flow import (
    FlowManagerResourceView,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.json import json_fragment
+from homeassistant.helpers.json import (
+    JSON_DUMP,
+    find_paths_unserializable_data,
+    json_bytes,
+    json_fragment,
+)
from homeassistant.loader import (
    Integration,
    IntegrationNotFound,
@@ -31,6 +37,9 @@ from homeassistant.loader import (
    async_get_integrations,
    async_get_loaded_integration,
)
+from homeassistant.util.json import format_unserializable_data
+
+_LOGGER = logging.getLogger(__name__)


@callback
@@ -402,18 +411,40 @@ def config_entries_flow_subscribe(
    connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
        async_on_flow_init_remove
    )
-    connection.send_message(
-        websocket_api.event_message(
-            msg["id"],
-            [
-                {"type": None, "flow_id": flw["flow_id"], "flow": flw}
-                for flw in hass.config_entries.flow.async_progress()
-                if flw["context"]["source"]
-                not in (
-                    config_entries.SOURCE_RECONFIGURE,
-                    config_entries.SOURCE_USER,
-                )
-            ],
-        )
-    )
+    try:
+        serialized_flows = [
+            json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
+            for flw in hass.config_entries.flow.async_progress()
+            if flw["context"]["source"]
+            not in (
+                config_entries.SOURCE_RECONFIGURE,
+                config_entries.SOURCE_USER,
+            )
+        ]
+    except (ValueError, TypeError):
+        # If we can't serialize, we'll filter out unserializable flows
+        serialized_flows = []
+        for flw in hass.config_entries.flow.async_progress():
+            if flw["context"]["source"] in (
+                config_entries.SOURCE_RECONFIGURE,
+                config_entries.SOURCE_USER,
+            ):
+                continue
+            try:
+                serialized_flows.append(
+                    json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
+                )
+            except (ValueError, TypeError):
+                _LOGGER.error(
+                    "Unable to serialize to JSON. Bad data found at %s",
+                    format_unserializable_data(
+                        find_paths_unserializable_data(flw, dump=JSON_DUMP)
+                    ),
+                )
+                continue
+    connection.send_message(
+        websocket_api.messages.construct_event_message(
+            msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
+        )
+    )
    connection.send_result(msg["id"])
@@ -2,6 +2,7 @@

from __future__ import annotations

+import asyncio
from datetime import timedelta
from ipaddress import IPv4Address, IPv6Address
import logging
@@ -55,16 +56,16 @@ async def async_setup_entry(
    hostname = entry.data[CONF_HOSTNAME]
    name = entry.data[CONF_NAME]

-    resolver_ipv4 = entry.options[CONF_RESOLVER]
-    resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
+    nameserver_ipv4 = entry.options[CONF_RESOLVER]
+    nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
    port_ipv4 = entry.options[CONF_PORT]
    port_ipv6 = entry.options[CONF_PORT_IPV6]

    entities = []
    if entry.data[CONF_IPV4]:
-        entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
+        entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
    if entry.data[CONF_IPV6]:
-        entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
+        entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))

    async_add_entities(entities, update_before_add=True)
@@ -76,11 +77,13 @@ class WanIpSensor(SensorEntity):
    _attr_translation_key = "dnsip"
    _unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})

+    resolver: aiodns.DNSResolver
+
    def __init__(
        self,
        name: str,
        hostname: str,
-        resolver: str,
+        nameserver: str,
        ipv6: bool,
        port: int,
    ) -> None:
@@ -88,12 +91,12 @@ class WanIpSensor(SensorEntity):
        self._attr_name = "IPv6" if ipv6 else None
        self._attr_unique_id = f"{hostname}_{ipv6}"
        self.hostname = hostname
-        self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
-        self.resolver.nameservers = [resolver]
+        self.port = port
+        self.nameserver = nameserver
        self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
        self._retries = DEFAULT_RETRIES
        self._attr_extra_state_attributes = {
-            "resolver": resolver,
+            "resolver": nameserver,
            "querytype": self.querytype,
        }
        self._attr_device_info = DeviceInfo(
@@ -103,14 +106,26 @@ class WanIpSensor(SensorEntity):
            model=aiodns.__version__,
            name=name,
        )
+        self.create_dns_resolver()
+
+    def create_dns_resolver(self) -> None:
+        """Create the DNS resolver."""
+        self.resolver = aiodns.DNSResolver(
+            nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
+        )

    async def async_update(self) -> None:
        """Get the current DNS IP address for hostname."""
+        if self.resolver._closed:  # noqa: SLF001
+            self.create_dns_resolver()
        response = None
        try:
-            response = await self.resolver.query(self.hostname, self.querytype)
+            async with asyncio.timeout(10):
+                response = await self.resolver.query(self.hostname, self.querytype)
+        except TimeoutError:
+            await self.resolver.close()
        except DNSError as err:
            _LOGGER.warning("Exception while resolving host: %s", err)
            response = None

        if response:
            sorted_ips = sort_ips(
@@ -116,7 +116,11 @@ class EbusdData:
        try:
            _LOGGER.debug("Opening socket to ebusd %s", name)
            command_result = ebusdpy.write(self._address, self._circuit, name, value)
-            if command_result is not None and "done" not in command_result:
+            if (
+                command_result is not None
+                and "done" not in command_result
+                and "empty" not in command_result
+            ):
                _LOGGER.warning("Write command failed: %s", name)
        except RuntimeError as err:
            _LOGGER.error(err)
@@ -2,3 +2,4 @@ raw_get_positions:
  target:
    entity:
      domain: vacuum
+      integration: ecovacs
@@ -46,6 +46,9 @@ async def async_get_config_entry_diagnostics(
            }
            for _, device in avm_wrapper.devices.items()
        ],
+        "cpu_temperatures": await hass.async_add_executor_job(
+            avm_wrapper.fritz_status.get_cpu_temperatures
+        ),
        "wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
    },
}
@@ -459,7 +459,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
        "developer-tools",
        require_admin=True,
        sidebar_title="developer_tools",
-        sidebar_icon="hass:hammer",
+        sidebar_icon="mdi:hammer",
    )


@callback
@@ -68,6 +68,7 @@ EVENT_HEALTH_CHANGED = "health_changed"
EVENT_SUPPORTED_CHANGED = "supported_changed"
EVENT_ISSUE_CHANGED = "issue_changed"
EVENT_ISSUE_REMOVED = "issue_removed"
+EVENT_JOB = "job"

UPDATE_KEY_SUPERVISOR = "supervisor"
@@ -56,6 +56,7 @@ from .const import (
    SupervisorEntityModel,
)
from .handler import HassioAPIError, get_supervisor_client
+from .jobs import SupervisorJobs

if TYPE_CHECKING:
    from .issues import SupervisorIssues
@@ -311,6 +312,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
            lambda: defaultdict(set)
        )
        self.supervisor_client = get_supervisor_client(hass)
+        self.jobs = SupervisorJobs(hass)

    async def _async_update_data(self) -> dict[str, Any]:
        """Update data via library."""
@@ -485,6 +487,9 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
            )
        )

+        # Refresh jobs data
+        await self.jobs.refresh_data(first_update)
+
    async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
        """Update single addon stats."""
        try:
157  homeassistant/components/hassio/jobs.py (new file)
@@ -0,0 +1,157 @@
"""Track Supervisor job data and allow subscription to updates."""

from collections.abc import Callable
from dataclasses import dataclass, replace
from functools import partial
from typing import Any
from uuid import UUID

from aiohasupervisor.models import Job

from homeassistant.core import (
    CALLBACK_TYPE,
    HomeAssistant,
    callback,
    is_callback_check_partial,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect

from .const import (
    ATTR_DATA,
    ATTR_UPDATE_KEY,
    ATTR_WS_EVENT,
    EVENT_JOB,
    EVENT_SUPERVISOR_EVENT,
    EVENT_SUPERVISOR_UPDATE,
    UPDATE_KEY_SUPERVISOR,
)
from .handler import get_supervisor_client


@dataclass(slots=True, frozen=True)
class JobSubscription:
    """Subscribe for updates on jobs which match filters.

    UUID is preferred match but only available in cases of a background API that
    returns the UUID before taking the action. Others are used to match jobs only
    if UUID is omitted. Either name or UUID is required to be able to match.

    event_callback must be safe annotated as a homeassistant.core.callback
    and safe to call in the event loop.
    """

    event_callback: Callable[[Job], Any]
    uuid: str | None = None
    name: str | None = None
    reference: str | None | type[Any] = Any

    def __post_init__(self) -> None:
        """Validate at least one filter option is present."""
        if not self.name and not self.uuid:
            raise ValueError("Either name or uuid must be provided!")
        if not is_callback_check_partial(self.event_callback):
            raise ValueError("event_callback must be a homeassistant.core.callback!")

    def matches(self, job: Job) -> bool:
        """Return true if job matches subscription filters."""
        if self.uuid:
            return job.uuid == self.uuid
        return job.name == self.name and self.reference in (Any, job.reference)


class SupervisorJobs:
    """Manage access to Supervisor jobs."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize object."""
        self._hass = hass
        self._supervisor_client = get_supervisor_client(hass)
        self._jobs: dict[UUID, Job] = {}
        self._subscriptions: set[JobSubscription] = set()

    @property
    def current_jobs(self) -> list[Job]:
        """Return current jobs."""
        return list(self._jobs.values())

    def subscribe(self, subscription: JobSubscription) -> CALLBACK_TYPE:
        """Subscribe to updates for job. Return callback is used to unsubscribe.

        If any jobs match the subscription at the time this is called, creates
        tasks to run their callback on it.
        """
        self._subscriptions.add(subscription)

        # As these are callbacks they are safe to run in the event loop
        # We wrap these in an asyncio task so subscribing does not wait on the logic
        if matches := [job for job in self._jobs.values() if subscription.matches(job)]:

            async def event_callback_async(job: Job) -> Any:
                return subscription.event_callback(job)

            for match in matches:
                self._hass.async_create_task(event_callback_async(match))

        return partial(self._subscriptions.discard, subscription)

    async def refresh_data(self, first_update: bool = False) -> None:
        """Refresh job data."""
        job_data = await self._supervisor_client.jobs.info()
        job_queue: list[Job] = job_data.jobs.copy()
        new_jobs: dict[UUID, Job] = {}
        changed_jobs: list[Job] = []

        # Rebuild our job cache from new info and compare to find changes
        while job_queue:
            job = job_queue.pop(0)
            job_queue.extend(job.child_jobs)
            job = replace(job, child_jobs=[])

            if job.uuid not in self._jobs or job != self._jobs[job.uuid]:
                changed_jobs.append(job)
            new_jobs[job.uuid] = replace(job, child_jobs=[])

        # For any jobs that disappeared which weren't done, tell subscribers they
        # changed to done. We don't know what else happened to them so leave the
        # rest of their state as is rather then guessing
        changed_jobs.extend(
            [
                replace(job, done=True)
                for uuid, job in self._jobs.items()
                if uuid not in new_jobs and job.done is False
            ]
        )

        # Replace our cache and inform subscribers of all changes
        self._jobs = new_jobs
        for job in changed_jobs:
            self._process_job_change(job)

        # If this is the first update register to receive Supervisor events
        if first_update:
            async_dispatcher_connect(
                self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_jobs
            )

    @callback
    def _supervisor_events_to_jobs(self, event: dict[str, Any]) -> None:
        """Update job data cache from supervisor events."""
        if ATTR_WS_EVENT not in event:
            return

        if (
            event[ATTR_WS_EVENT] == EVENT_SUPERVISOR_UPDATE
            and event.get(ATTR_UPDATE_KEY) == UPDATE_KEY_SUPERVISOR
        ):
            self._hass.async_create_task(self.refresh_data())

        elif event[ATTR_WS_EVENT] == EVENT_JOB:
            job = Job.from_dict(event[ATTR_DATA] | {"child_jobs": []})
            self._jobs[job.uuid] = job
            self._process_job_change(job)

    def _process_job_change(self, job: Job) -> None:
        """Process a job change by triggering callbacks on subscribers."""
        for sub in self._subscriptions:
            if sub.matches(job):
                sub.event_callback(job)
@@ -6,6 +6,7 @@ import re
from typing import Any

from aiohasupervisor import SupervisorError
+from aiohasupervisor.models import Job
from awesomeversion import AwesomeVersion, AwesomeVersionStrategy

from homeassistant.components.update import (
@@ -15,7 +16,7 @@ from homeassistant.components.update import (
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON, ATTR_NAME
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -35,6 +36,7 @@ from .entity import (
    HassioOSEntity,
    HassioSupervisorEntity,
)
+from .jobs import JobSubscription
from .update_helper import update_addon, update_core, update_os

ENTITY_DESCRIPTION = UpdateEntityDescription(
@@ -89,6 +91,7 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
        UpdateEntityFeature.INSTALL
        | UpdateEntityFeature.BACKUP
        | UpdateEntityFeature.RELEASE_NOTES
+        | UpdateEntityFeature.PROGRESS
    )

    @property
@@ -154,6 +157,30 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
        )
        await self.coordinator.async_refresh()

+    @callback
+    def _update_job_changed(self, job: Job) -> None:
+        """Process update for this entity's update job."""
+        if job.done is False:
+            self._attr_in_progress = True
+            self._attr_update_percentage = job.progress
+        else:
+            self._attr_in_progress = False
+            self._attr_update_percentage = None
+        self.async_write_ha_state()
+
+    async def async_added_to_hass(self) -> None:
+        """Subscribe to progress updates."""
+        await super().async_added_to_hass()
+        self.async_on_remove(
+            self.coordinator.jobs.subscribe(
+                JobSubscription(
+                    self._update_job_changed,
+                    name="addon_manager_update",
+                    reference=self._addon_slug,
+                )
+            )
+        )
+

class SupervisorOSUpdateEntity(HassioOSEntity, UpdateEntity):
    """Update entity to handle updates for the Home Assistant Operating System."""
@@ -250,6 +277,7 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
        UpdateEntityFeature.INSTALL
        | UpdateEntityFeature.SPECIFIC_VERSION
        | UpdateEntityFeature.BACKUP
+        | UpdateEntityFeature.PROGRESS
    )
    _attr_title = "Home Assistant Core"
@@ -281,3 +309,25 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
    ) -> None:
        """Install an update."""
        await update_core(self.hass, version, backup)
+
+    @callback
+    def _update_job_changed(self, job: Job) -> None:
+        """Process update for this entity's update job."""
+        if job.done is False:
+            self._attr_in_progress = True
+            self._attr_update_percentage = job.progress
+        else:
+            self._attr_in_progress = False
+            self._attr_update_percentage = None
+        self.async_write_ha_state()
+
+    async def async_added_to_hass(self) -> None:
+        """Subscribe to progress updates."""
+        await super().async_added_to_hass()
+        self.async_on_remove(
+            self.coordinator.jobs.subscribe(
+                JobSubscription(
+                    self._update_job_changed, name="home_assistant_core_update"
+                )
+            )
+        )
@@ -46,7 +46,7 @@ CONFIG_SCHEMA = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the history hooks."""
    hass.http.register_view(HistoryPeriodView())
-    frontend.async_register_built_in_panel(hass, "history", "history", "hass:chart-box")
+    frontend.async_register_built_in_panel(hass, "history", "history", "mdi:chart-box")
    websocket_api.async_setup(hass)
    return True
@@ -27,6 +27,12 @@
      "install_addon": {
        "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
      },
+      "install_thread_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
+      },
+      "install_zigbee_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
+      },
      "notify_channel_change": {
        "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
        "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -69,12 +75,10 @@
        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
      },
      "install_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
      },
      "start_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
      },
      "otbr_failed": {
        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -129,14 +133,21 @@
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
+        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
+        "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
        "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
+        "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
      }
    },
    "config": {
      "flow_title": "{model}",
      "step": {
+        "install_thread_firmware": {
+          "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
+        },
+        "install_zigbee_firmware": {
+          "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
+        },
        "pick_firmware": {
          "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
          "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
@@ -158,12 +169,10 @@
        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
      },
      "install_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
      },
      "start_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
      },
      "otbr_failed": {
        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -215,9 +224,10 @@
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
+        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
+        "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
        "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
+        "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
      }
    },
    "exceptions": {
@@ -61,6 +61,13 @@ class PickedFirmwareType(StrEnum):
    ZIGBEE = "zigbee"


+class ZigbeeFlowStrategy(StrEnum):
+    """Zigbee setup strategies that can be picked."""
+
+    ADVANCED = "advanced"
+    RECOMMENDED = "recommended"
+
+
class ZigbeeIntegration(StrEnum):
    """Zigbee integrations that can be picked."""

@@ -73,6 +80,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):

    ZIGBEE_BAUDRATE = 115200  # Default, subclasses may override
    _picked_firmware_type: PickedFirmwareType
+    _zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Instantiate base flow."""
@@ -395,12 +403,14 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
    ) -> ConfigFlowResult:
        """Select recommended installation type."""
        self._zigbee_integration = ZigbeeIntegration.ZHA
+        self._zigbee_flow_strategy = ZigbeeFlowStrategy.RECOMMENDED
        return await self._async_continue_picked_firmware()

    async def async_step_zigbee_intent_custom(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Select custom installation type."""
+        self._zigbee_flow_strategy = ZigbeeFlowStrategy.ADVANCED
        return await self.async_step_zigbee_integration()

    async def async_step_zigbee_integration(
@@ -521,6 +531,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
                    "flow_control": "hardware",
                },
                "radio_type": "ezsp",
+                "flow_strategy": self._zigbee_flow_strategy,
            },
        )
        return self._continue_zha_flow(result)
@@ -23,12 +23,16 @@
        "description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
      },
      "install_otbr_addon": {
-        "title": "Installing OpenThread Border Router add-on",
-        "description": "The OpenThread Border Router (OTBR) add-on is being installed."
+        "title": "Configuring Thread"
+      },
+      "install_thread_firmware": {
+        "title": "Updating adapter"
+      },
+      "install_zigbee_firmware": {
+        "title": "Updating adapter"
      },
      "start_otbr_addon": {
-        "title": "Starting OpenThread Border Router add-on",
-        "description": "The OpenThread Border Router (OTBR) add-on is now starting."
+        "title": "Configuring Thread"
      },
      "otbr_failed": {
        "title": "Failed to set up OpenThread Border Router",
@@ -72,7 +76,9 @@
        "fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
      },
      "progress": {
-        "install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
+        "install_firmware": "Installing {firmware_name} firmware.\n\nDo not make any changes to your hardware or software until this finishes.",
+        "install_otbr_addon": "Installing add-on",
+        "start_otbr_addon": "Starting add-on"
      }
    }
  },
@@ -27,6 +27,12 @@
      "install_addon": {
        "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
      },
+      "install_thread_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
+      },
+      "install_zigbee_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
+      },
      "notify_channel_change": {
        "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
        "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
@@ -69,12 +75,10 @@
        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
      },
      "install_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
      },
      "start_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
      },
      "otbr_failed": {
        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -129,9 +133,10 @@
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
+        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
+        "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
        "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
+        "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
      }
    },
    "config": {
@@ -158,12 +163,16 @@
        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
      },
      "install_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
      },
+      "install_thread_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
+      },
+      "install_zigbee_firmware": {
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
+      },
      "start_otbr_addon": {
-        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
-        "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
+        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
      },
      "otbr_failed": {
        "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
@@ -215,9 +224,10 @@
      },
      "progress": {
        "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
+        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
+        "install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
        "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
-        "install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
+        "start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
      }
    },
    "exceptions": {
|
@@ -35,6 +35,12 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -92,12 +98,10 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -154,9 +158,10 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
}
|
||||
},
|
||||
"entity": {
@@ -2,8 +2,12 @@

reload:
reset_accessory:
target:
entity: {}
fields:
entity_id:
required: true
selector:
entity:
multiple: true

unpair:
fields:

@@ -76,7 +76,13 @@
},
"reset_accessory": {
"name": "Reset accessory",
"description": "Resets a HomeKit accessory."
"description": "Resets a HomeKit accessory.",
"fields": {
"entity_id": {
"name": "Entity",
"description": "Entity to reset."
}
}
},
"unpair": {
"name": "Unpair an accessory or bridge",

@@ -145,7 +145,11 @@ class HueMotionSensor(HueBaseEntity, BinarySensorEntity):
if not self.resource.enabled:
# Force None (unknown) if the sensor is set to disabled in Hue
return None
return self.resource.motion.value
if not (motion_feature := self.resource.motion):
return None
if motion_feature.motion_report is not None:
return motion_feature.motion_report.motion
return motion_feature.motion


# pylint: disable-next=hass-enforce-class-module

@@ -169,6 +169,12 @@
},
"energy_battery_consumed": {
"default": "mdi:battery-arrow-down-outline"
},
"forecast_cons_remaining_today": {
"default": "mdi:chart-line"
},
"forecast_prod_remaining_today": {
"default": "mdi:chart-line"
}
},
"select": {

@@ -417,6 +417,21 @@ SENSOR_DESCRIPTIONS = (
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=2,
),
# Forecast
SensorEntityDescription(
key="forecast_cons_remaining_today",
translation_key="forecast_cons_remaining_today",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
suggested_display_precision=2,
),
SensorEntityDescription(
key="forecast_prod_remaining_today",
translation_key="forecast_prod_remaining_today",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
suggested_display_precision=2,
),
)

@@ -213,6 +213,12 @@
},
"energy_battery_consumed": {
"name": "Today battery-consumed energy"
},
"forecast_cons_remaining_today": {
"name": "Forecast remaining energy consumption for today"
},
"forecast_prod_remaining_today": {
"name": "Forecast remaining energy production for today"
}
},
"select": {

@@ -35,7 +35,7 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.typing import ConfigType

from .config_flow import ( # Loading the config flow file will register the flow
@@ -221,6 +221,19 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Konnected platform."""
ir.async_create_issue(
hass,
DOMAIN,
"deprecated_firmware",
breaks_in_ha_version="2026.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_firmware",
translation_placeholders={
"kb_page_url": "https://support.konnected.io/migrating-from-konnected-legacy-home-assistant-integration-to-esphome",
},
)
if (cfg := config.get(DOMAIN)) is None:
cfg = {}

@@ -1,6 +1,6 @@
{
"domain": "konnected",
"name": "Konnected.io",
"name": "Konnected.io (Legacy)",
"codeowners": ["@heythisisnate"],
"config_flow": true,
"dependencies": ["http"],

@@ -105,5 +105,11 @@
"abort": {
"not_konn_panel": "[%key:component::konnected::config::abort::not_konn_panel%]"
}
},
"issues": {
"deprecated_firmware": {
"title": "Konnected firmware is deprecated",
"description": "Konnected's integration is deprecated and Konnected strongly recommends migrating to their ESPHome based firmware and integration by following the guide at {kb_page_url}. After this migration, make sure you don't have any Konnected YAML configuration left in your configuration.yaml file and remove this integration from Home Assistant."
}
}
}

@@ -282,9 +282,24 @@
|
||||
"filter_lifetime": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"top_filter_remain_percent": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"used_time": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"water_filter_state": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"water_filter_1_remain_percent": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"water_filter_2_remain_percent": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"water_filter_3_remain_percent": {
|
||||
"default": "mdi:air-filter"
|
||||
},
|
||||
"current_job_mode": {
|
||||
"default": "mdi:dots-circle"
|
||||
},
|
||||
|
@@ -110,6 +110,11 @@ FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.FILTER_LIFETIME,
|
||||
),
|
||||
ThinQProperty.TOP_FILTER_REMAIN_PERCENT: SensorEntityDescription(
|
||||
key=ThinQProperty.TOP_FILTER_REMAIN_PERCENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.TOP_FILTER_REMAIN_PERCENT,
|
||||
),
|
||||
}
|
||||
HUMIDITY_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
ThinQProperty.CURRENT_HUMIDITY: SensorEntityDescription(
|
||||
@@ -221,6 +226,11 @@ REFRIGERATION_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
translation_key=ThinQProperty.FRESH_AIR_FILTER,
|
||||
),
|
||||
ThinQProperty.FRESH_AIR_FILTER_REMAIN_PERCENT: SensorEntityDescription(
|
||||
key=ThinQProperty.FRESH_AIR_FILTER_REMAIN_PERCENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.FRESH_AIR_FILTER,
|
||||
),
|
||||
}
|
||||
RUN_STATE_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
ThinQProperty.CURRENT_STATE: SensorEntityDescription(
|
||||
@@ -303,6 +313,25 @@ WATER_FILTER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
native_unit_of_measurement=UnitOfTime.MONTHS,
|
||||
translation_key=ThinQProperty.USED_TIME,
|
||||
),
|
||||
ThinQProperty.WATER_FILTER_STATE: SensorEntityDescription(
|
||||
key=ThinQProperty.WATER_FILTER_STATE,
|
||||
translation_key=ThinQProperty.WATER_FILTER_STATE,
|
||||
),
|
||||
ThinQProperty.WATER_FILTER_1_REMAIN_PERCENT: SensorEntityDescription(
|
||||
key=ThinQProperty.WATER_FILTER_1_REMAIN_PERCENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.WATER_FILTER_1_REMAIN_PERCENT,
|
||||
),
|
||||
ThinQProperty.WATER_FILTER_2_REMAIN_PERCENT: SensorEntityDescription(
|
||||
key=ThinQProperty.WATER_FILTER_2_REMAIN_PERCENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.WATER_FILTER_2_REMAIN_PERCENT,
|
||||
),
|
||||
ThinQProperty.WATER_FILTER_3_REMAIN_PERCENT: SensorEntityDescription(
|
||||
key=ThinQProperty.WATER_FILTER_3_REMAIN_PERCENT,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
translation_key=ThinQProperty.WATER_FILTER_3_REMAIN_PERCENT,
|
||||
),
|
||||
}
|
||||
WATER_INFO_SENSOR_DESC: dict[ThinQProperty, SensorEntityDescription] = {
|
||||
ThinQProperty.WATER_TYPE: SensorEntityDescription(
|
||||
@@ -437,6 +466,7 @@ DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] =
|
||||
AIR_QUALITY_SENSOR_DESC[ThinQProperty.ODOR_LEVEL],
|
||||
AIR_QUALITY_SENSOR_DESC[ThinQProperty.TOTAL_POLLUTION_LEVEL],
|
||||
FILTER_INFO_SENSOR_DESC[ThinQProperty.FILTER_REMAIN_PERCENT],
|
||||
FILTER_INFO_SENSOR_DESC[ThinQProperty.TOP_FILTER_REMAIN_PERCENT],
|
||||
JOB_MODE_SENSOR_DESC[ThinQProperty.CURRENT_JOB_MODE],
|
||||
JOB_MODE_SENSOR_DESC[ThinQProperty.PERSONALIZATION_MODE],
|
||||
TIME_SENSOR_DESC[TimerProperty.ABSOLUTE_TO_START],
|
||||
@@ -513,7 +543,12 @@ DEVICE_TYPE_SENSOR_MAP: dict[DeviceType, tuple[SensorEntityDescription, ...]] =
|
||||
),
|
||||
DeviceType.REFRIGERATOR: (
|
||||
REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER],
|
||||
REFRIGERATION_SENSOR_DESC[ThinQProperty.FRESH_AIR_FILTER_REMAIN_PERCENT],
|
||||
WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.USED_TIME],
|
||||
WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.WATER_FILTER_STATE],
|
||||
WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.WATER_FILTER_1_REMAIN_PERCENT],
|
||||
WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.WATER_FILTER_2_REMAIN_PERCENT],
|
||||
WATER_FILTER_INFO_SENSOR_DESC[ThinQProperty.WATER_FILTER_3_REMAIN_PERCENT],
|
||||
),
|
||||
DeviceType.ROBOT_CLEANER: (
|
||||
RUN_STATE_SENSOR_DESC[ThinQProperty.CURRENT_STATE],
|
||||
|
@@ -241,7 +241,9 @@
|
||||
"timer_is_complete": "Timer has been completed",
|
||||
"washing_is_complete": "Washing is completed",
|
||||
"water_is_full": "Water is full",
|
||||
"water_leak_has_occurred": "The dishwasher has detected a water leak"
|
||||
"water_leak_has_occurred": "The dishwasher has detected a water leak",
|
||||
"filter_reset_complete": "The filter lifetime has been reset",
|
||||
"water_filter_reset_complete": "The water filter lifetime has been reset"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -608,9 +610,24 @@
|
||||
"filter_lifetime": {
|
||||
"name": "Filter remaining"
|
||||
},
|
||||
"top_filter_remain_percent": {
|
||||
"name": "Upper filter remaining"
|
||||
},
|
||||
"used_time": {
|
||||
"name": "Water filter used"
|
||||
},
|
||||
"water_filter_state": {
|
||||
"name": "Water filter"
|
||||
},
|
||||
"water_filter_1_remain_percent": {
|
||||
"name": "[%key:component::lg_thinq::entity::sensor::water_filter_state::name%]"
|
||||
},
|
||||
"water_filter_2_remain_percent": {
|
||||
"name": "Water filter stage 2"
|
||||
},
|
||||
"water_filter_3_remain_percent": {
|
||||
"name": "Water filter stage 3"
|
||||
},
|
||||
"current_job_mode": {
|
||||
"name": "Operating mode",
|
||||
"state": {
@@ -3,6 +3,7 @@
set_sleep_mode:
target:
entity:
domain: vacuum
integration: litterrobot
fields:
enabled:

@@ -26,6 +26,7 @@ FIRMWARE_UPDATE_ENTITY = UpdateEntityDescription(
key="firmware",
device_class=UpdateDeviceClass.FIRMWARE,
)
RELEASE_URL = "https://www.litter-robot.com/releases.html"


async def async_setup_entry(
@@ -48,6 +49,7 @@ async def async_setup_entry(
class RobotUpdateEntity(LitterRobotEntity[LitterRobot4], UpdateEntity):
"""A class that describes robot update entities."""

_attr_release_url = RELEASE_URL
_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
)

@@ -115,7 +115,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_log_entry(hass, name, message, domain, entity_id, service.context)

frontend.async_register_built_in_panel(
hass, "logbook", "logbook", "hass:format-list-bulleted-type"
hass, "logbook", "logbook", "mdi:format-list-bulleted-type"
)

recorder_conf = config.get(RECORDER_DOMAIN, {})

@@ -24,7 +24,7 @@ if TYPE_CHECKING:
DOMAIN = "lovelace"
LOVELACE_DATA: HassKey[LovelaceData] = HassKey(DOMAIN)

DEFAULT_ICON = "hass:view-dashboard"
DEFAULT_ICON = "mdi:view-dashboard"

MODE_YAML = "yaml"
MODE_STORAGE = "storage"

@@ -148,6 +148,9 @@
},
"evse_charging_switch": {
"default": "mdi:ev-station"
},
"privacy_mode_button": {
"default": "mdi:shield-lock"
}
}
}

@@ -80,9 +80,7 @@ class MatterNumber(MatterEntity, NumberEntity):
|
||||
sendvalue = int(value)
|
||||
if value_convert := self.entity_description.ha_to_device:
|
||||
sendvalue = value_convert(value)
|
||||
await self.write_attribute(
|
||||
value=sendvalue,
|
||||
)
|
||||
await self.write_attribute(value=sendvalue)
|
||||
|
||||
@callback
|
||||
def _update_from_device(self) -> None:
|
||||
@@ -437,4 +435,35 @@ DISCOVERY_SCHEMAS = [
|
||||
custom_clusters.InovelliCluster.Attributes.LEDIndicatorIntensityOn,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
key="DoorLockWrongCodeEntryLimit",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="wrong_code_entry_limit",
|
||||
native_max_value=255,
|
||||
native_min_value=1,
|
||||
native_step=1,
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
required_attributes=(clusters.DoorLock.Attributes.WrongCodeEntryLimit,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.NUMBER,
|
||||
entity_description=MatterNumberEntityDescription(
|
||||
key="DoorLockUserCodeTemporaryDisableTime",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="user_code_temporary_disable_time",
|
||||
native_max_value=255,
|
||||
native_min_value=1,
|
||||
native_step=1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
mode=NumberMode.BOX,
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
required_attributes=(
|
||||
clusters.DoorLock.Attributes.UserCodeTemporaryDisableTime,
|
||||
),
|
||||
),
|
||||
]
|
||||
|
@@ -198,6 +198,9 @@
|
||||
"pump_setpoint": {
|
||||
"name": "Setpoint"
|
||||
},
|
||||
"user_code_temporary_disable_time": {
|
||||
"name": "User code temporary disable time"
|
||||
},
|
||||
"temperature_offset": {
|
||||
"name": "Temperature offset"
|
||||
},
|
||||
@@ -218,6 +221,9 @@
|
||||
},
|
||||
"valve_configuration_and_control_default_open_duration": {
|
||||
"name": "Default open duration"
|
||||
},
|
||||
"wrong_code_entry_limit": {
|
||||
"name": "Wrong code limit"
|
||||
}
|
||||
},
|
||||
"light": {
|
||||
@@ -513,6 +519,9 @@
|
||||
},
|
||||
"evse_charging_switch": {
|
||||
"name": "Enable charging"
|
||||
},
|
||||
"privacy_mode_button": {
|
||||
"name": "Privacy mode button"
|
||||
}
|
||||
},
|
||||
"vacuum": {
|
||||
|
@@ -263,6 +263,18 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
vendor_id=(4874,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=MatterNumericSwitchEntityDescription(
|
||||
key="DoorLockEnablePrivacyModeButton",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
translation_key="privacy_mode_button",
|
||||
device_to_ha=bool,
|
||||
ha_to_device=int,
|
||||
),
|
||||
entity_class=MatterNumericSwitch,
|
||||
required_attributes=(clusters.DoorLock.Attributes.EnablePrivacyModeButton,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=MatterGenericCommandSwitchEntityDescription(
|
||||
|
@@ -9,6 +9,18 @@
|
||||
"url": "The remote MCP server URL for the SSE endpoint, for example http://example/sse"
|
||||
}
|
||||
},
|
||||
"credentials_choice": {
|
||||
"title": "Choose how to authenticate with the MCP server",
|
||||
"description": "You can either use existing credentials from another integration or set up new credentials.",
|
||||
"menu_options": {
|
||||
"new_credentials": "Set up new credentials",
|
||||
"pick_implementation": "Use existing credentials"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"new_credentials": "You will be guided through setting up a new OAuth Client ID and secret.",
|
||||
"pick_implementation": "You may use previously entered OAuth credentials."
|
||||
}
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
@@ -27,14 +39,21 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"missing_capabilities": "The MCP server does not support a required capability (Tools)",
|
||||
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"reauth_account_mismatch": "The authenticated user does not match the MCP Server user that needed re-authentication.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
|
||||
}
|
||||
}
|
||||
}
@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["aiomealie==0.10.2"]
"requirements": ["aiomealie==0.11.0"]
}

@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2025.09.23"],
"requirements": ["yt-dlp[default]==2025.09.26"],
"single_config_entry": true
}

@@ -25,7 +25,7 @@ def async_setup(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_browse_media)
websocket_api.async_register_command(hass, websocket_resolve_media)
frontend.async_register_built_in_panel(
hass, "media-browser", "media_browser", "hass:play-box-multiple"
hass, "media-browser", "media_browser", "mdi:play-box-multiple"
)

@@ -51,10 +51,7 @@ from homeassistant.components.sensor import (
|
||||
DEVICE_CLASS_UNITS,
|
||||
STATE_CLASS_UNITS,
|
||||
SensorDeviceClass,
|
||||
)
|
||||
from homeassistant.components.sensor.helpers import (
|
||||
create_sensor_device_class_select_selector,
|
||||
create_sensor_state_class_select_selector,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.components.switch import SwitchDeviceClass
|
||||
from homeassistant.config_entries import (
|
||||
@@ -706,6 +703,14 @@ SCALE_SELECTOR = NumberSelector(
|
||||
step=1,
|
||||
)
|
||||
)
|
||||
SENSOR_DEVICE_CLASS_SELECTOR = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[device_class.value for device_class in SensorDeviceClass],
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key="device_class_sensor",
|
||||
sort=True,
|
||||
)
|
||||
)
|
||||
SENSOR_ENTITY_CATEGORY_SELECTOR = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[EntityCategory.DIAGNOSTIC.value],
|
||||
@@ -714,6 +719,13 @@ SENSOR_ENTITY_CATEGORY_SELECTOR = SelectSelector(
|
||||
sort=True,
|
||||
)
|
||||
)
|
||||
SENSOR_STATE_CLASS_SELECTOR = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[device_class.value for device_class in SensorStateClass],
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key=CONF_STATE_CLASS,
|
||||
)
|
||||
)
|
||||
SUPPORTED_COLOR_MODES_SELECTOR = SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=[platform.value for platform in VALID_COLOR_MODES],
|
||||
@@ -1272,12 +1284,10 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
|
||||
Platform.NOTIFY.value: {},
|
||||
Platform.SENSOR.value: {
|
||||
CONF_DEVICE_CLASS: PlatformField(
|
||||
selector=create_sensor_device_class_select_selector(),
|
||||
required=False,
|
||||
selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False
|
||||
),
|
||||
CONF_STATE_CLASS: PlatformField(
|
||||
selector=create_sensor_state_class_select_selector(),
|
||||
required=False,
|
||||
selector=SENSOR_STATE_CLASS_SELECTOR, required=False
|
||||
),
|
||||
CONF_UNIT_OF_MEASUREMENT: PlatformField(
|
||||
selector=unit_of_measurement_selector,
|
||||
|
@@ -1200,6 +1200,69 @@
|
||||
"window": "[%key:component::cover::entity_component::window::name%]"
|
||||
}
|
||||
},
|
||||
"device_class_sensor": {
|
||||
"options": {
|
||||
"absolute_humidity": "[%key:component::sensor::entity_component::absolute_humidity::name%]",
|
||||
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
|
||||
"area": "[%key:component::sensor::entity_component::area::name%]",
|
||||
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
|
||||
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
|
||||
"battery": "[%key:component::sensor::entity_component::battery::name%]",
|
||||
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
|
||||
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
|
||||
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
|
||||
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
|
||||
"current": "[%key:component::sensor::entity_component::current::name%]",
|
||||
"data_rate": "[%key:component::sensor::entity_component::data_rate::name%]",
|
||||
"data_size": "[%key:component::sensor::entity_component::data_size::name%]",
|
||||
"date": "[%key:component::sensor::entity_component::date::name%]",
|
||||
"distance": "[%key:component::sensor::entity_component::distance::name%]",
|
||||
"duration": "[%key:component::sensor::entity_component::duration::name%]",
|
||||
"energy": "[%key:component::sensor::entity_component::energy::name%]",
|
||||
"energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]",
|
||||
"energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]",
|
||||
"enum": "Enumeration",
|
||||
"frequency": "[%key:component::sensor::entity_component::frequency::name%]",
|
||||
"gas": "[%key:component::sensor::entity_component::gas::name%]",
|
||||
"humidity": "[%key:component::sensor::entity_component::humidity::name%]",
|
||||
"illuminance": "[%key:component::sensor::entity_component::illuminance::name%]",
|
||||
"irradiance": "[%key:component::sensor::entity_component::irradiance::name%]",
|
||||
"moisture": "[%key:component::sensor::entity_component::moisture::name%]",
|
||||
"monetary": "[%key:component::sensor::entity_component::monetary::name%]",
|
||||
"nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
|
||||
"nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]",
|
||||
"nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]",
|
||||
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
|
||||
"ph": "[%key:component::sensor::entity_component::ph::name%]",
|
||||
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
|
||||
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
|
||||
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
|
||||
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
|
||||
"power": "[%key:component::sensor::entity_component::power::name%]",
|
||||
"power_factor": "[%key:component::sensor::entity_component::power_factor::name%]",
|
||||
"precipitation": "[%key:component::sensor::entity_component::precipitation::name%]",
|
||||
"precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]",
|
||||
"pressure": "[%key:component::sensor::entity_component::pressure::name%]",
|
||||
"reactive_energy": "[%key:component::sensor::entity_component::reactive_energy::name%]",
|
||||
"reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]",
|
||||
"signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]",
|
||||
"sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]",
|
||||
"speed": "[%key:component::sensor::entity_component::speed::name%]",
|
||||
"sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]",
|
||||
"temperature": "[%key:component::sensor::entity_component::temperature::name%]",
|
||||
"timestamp": "[%key:component::sensor::entity_component::timestamp::name%]",
|
||||
"volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]",
|
||||
"volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds_parts::name%]",
|
||||
"voltage": "[%key:component::sensor::entity_component::voltage::name%]",
|
||||
"volume": "[%key:component::sensor::entity_component::volume::name%]",
|
||||
"volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]",
|
||||
"volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]",
|
||||
"water": "[%key:component::sensor::entity_component::water::name%]",
|
||||
"weight": "[%key:component::sensor::entity_component::weight::name%]",
|
||||
"wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]",
|
||||
"wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]"
|
||||
}
|
||||
},
|
||||
"device_class_switch": {
|
||||
"options": {
|
||||
"outlet": "[%key:component::switch::entity_component::outlet::name%]",
|
||||
@@ -1261,6 +1324,14 @@
|
||||
"custom": "Custom"
|
||||
}
|
||||
},
|
||||
"state_class": {
|
||||
"options": {
|
||||
"measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]",
|
||||
"measurement_angle": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement_angle%]",
|
||||
"total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]",
|
||||
"total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]"
|
||||
}
|
||||
},
|
||||
"supported_color_modes": {
|
||||
"options": {
|
||||
"onoff": "[%key:component::light::entity_component::_::state_attributes::color_mode::state::onoff%]",
@@ -124,7 +124,7 @@ class NumberDeviceClass(StrEnum):
CO = "carbon_monoxide"
"""Carbon Monoxide gas concentration.

Unit of measurement: `ppm` (parts per million), mg/m³
Unit of measurement: `ppm` (parts per million)
"""

CO2 = "carbon_dioxide"
@@ -475,10 +475,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
NumberDeviceClass.BATTERY: {PERCENTAGE},
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
NumberDeviceClass.CO: {
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
NumberDeviceClass.CURRENT: set(UnitOfElectricCurrent),

@@ -112,6 +112,9 @@
"pm1": {
"name": "[%key:component::sensor::entity_component::pm1::name%]"
},
"pm4": {
"name": "[%key:component::sensor::entity_component::pm4::name%]"
},
"pm10": {
"name": "[%key:component::sensor::entity_component::pm10::name%]"
},

@@ -341,12 +341,12 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
|
||||
def process_update(self, message: status.Known) -> None:
|
||||
"""Process update."""
|
||||
match message:
|
||||
case status.Power(status.Power.Param.ON):
|
||||
case status.Power(param=status.Power.Param.ON):
|
||||
self._attr_state = MediaPlayerState.ON
|
||||
case status.Power(status.Power.Param.STANDBY):
|
||||
case status.Power(param=status.Power.Param.STANDBY):
|
||||
self._attr_state = MediaPlayerState.OFF
|
||||
|
||||
case status.Volume(volume):
|
||||
case status.Volume(param=volume):
|
||||
if not self._supports_volume:
|
||||
self._attr_supported_features |= SUPPORTED_FEATURES_VOLUME
|
||||
self._supports_volume = True
|
||||
@@ -356,10 +356,10 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
|
||||
)
|
||||
self._attr_volume_level = min(1, volume_level)
|
||||
|
||||
case status.Muting(muting):
|
||||
case status.Muting(param=muting):
|
||||
self._attr_is_volume_muted = bool(muting == status.Muting.Param.ON)
|
||||
|
||||
case status.InputSource(source):
|
||||
case status.InputSource(param=source):
|
||||
if source in self._source_mapping:
|
||||
self._attr_source = self._source_mapping[source]
|
||||
else:
|
||||
@@ -373,7 +373,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
|
||||
|
||||
self._query_av_info_delayed()
|
||||
|
||||
case status.ListeningMode(sound_mode):
|
||||
case status.ListeningMode(param=sound_mode):
|
||||
if not self._supports_sound_mode:
|
||||
self._attr_supported_features |= (
|
||||
MediaPlayerEntityFeature.SELECT_SOUND_MODE
|
||||
@@ -393,13 +393,13 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
|
||||
|
||||
self._query_av_info_delayed()
|
||||
|
||||
case status.HDMIOutput(hdmi_output):
|
||||
case status.HDMIOutput(param=hdmi_output):
|
||||
self._attr_extra_state_attributes[ATTR_VIDEO_OUT] = (
|
||||
self._hdmi_output_mapping[hdmi_output]
|
||||
)
|
||||
self._query_av_info_delayed()
|
||||
|
||||
case status.TunerPreset(preset):
|
||||
case status.TunerPreset(param=preset):
|
||||
self._attr_extra_state_attributes[ATTR_PRESET] = preset
|
||||
|
||||
case status.AudioInformation():
|
||||
@@ -427,11 +427,11 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
|
||||
case status.FLDisplay():
|
||||
self._query_av_info_delayed()
|
||||
|
||||
case status.NotAvailable(Kind.AUDIO_INFORMATION):
|
||||
case status.NotAvailable(kind=Kind.AUDIO_INFORMATION):
|
||||
# Not available right now, but still supported
|
||||
self._supports_audio_info = True
|
||||
|
||||
case status.NotAvailable(Kind.VIDEO_INFORMATION):
|
||||
case status.NotAvailable(kind=Kind.VIDEO_INFORMATION):
|
||||
# Not available right now, but still supported
|
||||
self._supports_video_info = True
|
||||
|
||||
|
@@ -2,10 +2,12 @@ get_profile:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
set_profile:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
fields:
|
||||
hour_00:
|
||||
required: false
|
||||
@@ -227,6 +229,7 @@ set_v40_min:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
fields:
|
||||
v40_min:
|
||||
required: true
|
||||
@@ -241,6 +244,7 @@ turn_away_mode_on:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
fields:
|
||||
duration_days:
|
||||
required: true
|
||||
@@ -255,6 +259,7 @@ turn_off:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
fields:
|
||||
until_temp_limit:
|
||||
required: true
|
||||
@@ -266,6 +271,7 @@ turn_on:
|
||||
target:
|
||||
entity:
|
||||
domain: water_heater
|
||||
integration: osoenergy
|
||||
fields:
|
||||
until_temp_limit:
|
||||
required: true
@@ -129,10 +129,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: PiHoleConfigEntry) -> bo
raise ConfigEntryAuthFailed
except HoleError as err:
if str(err) == "Authentication failed: Invalid password":
raise ConfigEntryAuthFailed from err
raise UpdateFailed(f"Failed to communicate with API: {err}") from err
raise ConfigEntryAuthFailed(
f"Pi-hole {name} at host {host}, reported an invalid password"
) from err
raise UpdateFailed(
f"Pi-hole {name} at host {host}, update failed with HoleError: {err}"
) from err
if not isinstance(api.data, dict):
raise ConfigEntryAuthFailed
raise ConfigEntryAuthFailed(
f"Pi-hole {name} at host {host}, returned an unexpected response: {api.data}, assuming authentication failed"
)

coordinator = DataUpdateCoordinator(
hass,

@@ -114,6 +114,7 @@
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",

@@ -46,7 +46,6 @@ from homeassistant.util.unit_conversion import (
AreaConverter,
BaseUnitConverter,
BloodGlucoseConcentrationConverter,
CarbonMonoxideConcentrationConverter,
ConductivityConverter,
DataRateConverter,
DistanceConverter,
@@ -205,10 +204,6 @@ STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
**dict.fromkeys(
MassVolumeConcentrationConverter.VALID_UNITS, MassVolumeConcentrationConverter
),
**dict.fromkeys(
CarbonMonoxideConcentrationConverter.VALID_UNITS,
CarbonMonoxideConcentrationConverter,
),
**dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter),
**dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter),
**dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter),

@@ -19,7 +19,6 @@ from homeassistant.util.unit_conversion import (
ApparentPowerConverter,
AreaConverter,
BloodGlucoseConcentrationConverter,
CarbonMonoxideConcentrationConverter,
ConductivityConverter,
DataRateConverter,
DistanceConverter,
@@ -67,9 +66,6 @@ UNIT_SCHEMA = vol.Schema(
vol.Optional("blood_glucose_concentration"): vol.In(
BloodGlucoseConcentrationConverter.VALID_UNITS
),
vol.Optional("carbon_monoxide"): vol.In(
CarbonMonoxideConcentrationConverter.VALID_UNITS
),
vol.Optional("concentration"): vol.In(
MassVolumeConcentrationConverter.VALID_UNITS
),

@@ -74,21 +74,28 @@ BINARY_PUSH_SENSORS = (
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key=PERSON_DETECTION_TYPE,
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33, 600, 696],
|
||||
translation_key="person",
|
||||
value=lambda api, ch: api.ai_detected(ch, PERSON_DETECTION_TYPE),
|
||||
supported=lambda api, ch: api.ai_supported(ch, PERSON_DETECTION_TYPE),
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key=VEHICLE_DETECTION_TYPE,
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33, 600, 696],
|
||||
translation_key="vehicle",
|
||||
value=lambda api, ch: api.ai_detected(ch, VEHICLE_DETECTION_TYPE),
|
||||
supported=lambda api, ch: api.ai_supported(ch, VEHICLE_DETECTION_TYPE),
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key="non-motor_vehicle",
|
||||
cmd_id=[600, 696],
|
||||
translation_key="non-motor_vehicle",
|
||||
value=lambda api, ch: api.ai_detected(ch, "non-motor vehicle"),
|
||||
supported=lambda api, ch: api.supported(ch, "ai_non-motor vehicle"),
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key=PET_DETECTION_TYPE,
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33, 600, 696],
|
||||
translation_key="pet",
|
||||
value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE),
|
||||
supported=lambda api, ch: (
|
||||
@@ -98,14 +105,14 @@ BINARY_PUSH_SENSORS = (
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key=PET_DETECTION_TYPE,
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33, 600, 696],
|
||||
translation_key="animal",
|
||||
value=lambda api, ch: api.ai_detected(ch, PET_DETECTION_TYPE),
|
||||
supported=lambda api, ch: api.supported(ch, "ai_animal"),
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key=PACKAGE_DETECTION_TYPE,
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33, 600, 696],
|
||||
translation_key="package",
|
||||
value=lambda api, ch: api.ai_detected(ch, PACKAGE_DETECTION_TYPE),
|
||||
supported=lambda api, ch: api.ai_supported(ch, PACKAGE_DETECTION_TYPE),
|
||||
@@ -120,7 +127,7 @@ BINARY_PUSH_SENSORS = (
|
||||
),
|
||||
ReolinkBinarySensorEntityDescription(
|
||||
key="cry",
|
||||
cmd_id=[33, 600],
|
||||
cmd_id=[33],
|
||||
translation_key="cry",
|
||||
value=lambda api, ch: api.ai_detected(ch, "cry"),
|
||||
supported=lambda api, ch: api.ai_supported(ch, "cry"),
|
||||
|
@@ -13,6 +13,12 @@
|
||||
"on": "mdi:car"
|
||||
}
|
||||
},
|
||||
"non-motor_vehicle": {
|
||||
"default": "mdi:motorbike-off",
|
||||
"state": {
|
||||
"on": "mdi:motorbike"
|
||||
}
|
||||
},
|
||||
"pet": {
|
||||
"default": "mdi:dog-side-off",
|
||||
"state": {
|
||||
@@ -172,9 +178,18 @@
|
||||
"floodlight_brightness": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"floodlight_event_brightness": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"ir_brightness": {
|
||||
"default": "mdi:led-off"
|
||||
},
|
||||
"floodlight_event_on_time": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"floodlight_event_flash_time": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"volume": {
|
||||
"default": "mdi:volume-high",
|
||||
"state": {
|
||||
@@ -223,6 +238,9 @@
|
||||
"ai_vehicle_sensitivity": {
|
||||
"default": "mdi:car"
|
||||
},
|
||||
"ai_non_motor_vehicle_sensitivity": {
|
||||
"default": "mdi:bicycle"
|
||||
},
|
||||
"ai_package_sensitivity": {
|
||||
"default": "mdi:gift-outline"
|
||||
},
|
||||
@@ -259,6 +277,9 @@
|
||||
"ai_vehicle_delay": {
|
||||
"default": "mdi:car"
|
||||
},
|
||||
"ai_non_motor_vehicle_delay": {
|
||||
"default": "mdi:bicycle"
|
||||
},
|
||||
"ai_package_delay": {
|
||||
"default": "mdi:gift-outline"
|
||||
},
|
||||
@@ -327,6 +348,9 @@
|
||||
"floodlight_mode": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"floodlight_event_mode": {
|
||||
"default": "mdi:spotlight-beam"
|
||||
},
|
||||
"day_night_mode": {
|
||||
"default": "mdi:theme-light-dark"
|
||||
},
|
||||
@@ -456,6 +480,15 @@
|
||||
},
|
||||
"sd_storage": {
|
||||
"default": "mdi:micro-sd"
|
||||
},
|
||||
"person_type": {
|
||||
"default": "mdi:account"
|
||||
},
|
||||
"vehicle_type": {
|
||||
"default": "mdi:car"
|
||||
},
|
||||
"animal_type": {
|
||||
"default": "mdi:paw"
|
||||
}
|
||||
},
|
||||
"siren": {
@@ -19,5 +19,5 @@
"iot_class": "local_push",
"loggers": ["reolink_aio"],
"quality_scale": "platinum",
"requirements": ["reolink-aio==0.15.2"]
"requirements": ["reolink-aio==0.16.0"]
}

@@ -125,6 +125,22 @@ NUMBER_ENTITIES = (
|
||||
value=lambda api, ch: api.whiteled_brightness(ch),
|
||||
method=lambda api, ch, value: api.set_whiteled(ch, brightness=int(value)),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="floodlight_event_brightness",
|
||||
cmd_key="GetWhiteLed",
|
||||
cmd_id=[289, 438],
|
||||
translation_key="floodlight_event_brightness",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
native_step=1,
|
||||
native_min_value=1,
|
||||
native_max_value=100,
|
||||
supported=lambda api, ch: api.supported(ch, "floodlight_event"),
|
||||
value=lambda api, ch: api.whiteled_event_brightness(ch),
|
||||
method=lambda api, ch, value: (
|
||||
api.baichuan.set_floodlight(ch, event_brightness=int(value))
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="ir_brightness",
|
||||
cmd_key="208",
|
||||
@@ -139,6 +155,42 @@ NUMBER_ENTITIES = (
|
||||
api.baichuan.set_status_led(ch, ir_brightness=int(value))
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="floodlight_event_on_time",
|
||||
cmd_key="GetWhiteLed",
|
||||
cmd_id=[289, 438],
|
||||
translation_key="floodlight_event_on_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
entity_registry_enabled_default=False,
|
||||
native_step=1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_min_value=30,
|
||||
native_max_value=900,
|
||||
supported=lambda api, ch: api.supported(ch, "floodlight_event"),
|
||||
value=lambda api, ch: api.whiteled_event_on_time(ch),
|
||||
method=lambda api, ch, value: (
|
||||
api.baichuan.set_floodlight(ch, event_on_time=int(value))
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="floodlight_event_flash_time",
|
||||
cmd_key="GetWhiteLed",
|
||||
cmd_id=[289, 438],
|
||||
translation_key="floodlight_event_flash_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
entity_registry_enabled_default=False,
|
||||
native_step=1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_min_value=10,
|
||||
native_max_value=30,
|
||||
supported=lambda api, ch: api.supported(ch, "floodlight_event"),
|
||||
value=lambda api, ch: api.whiteled_event_flash_time(ch),
|
||||
method=lambda api, ch, value: (
|
||||
api.baichuan.set_floodlight(ch, event_flash_time=int(value))
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="volume",
|
||||
cmd_key="GetAudioCfg",
|
||||
@@ -255,6 +307,23 @@ NUMBER_ENTITIES = (
|
||||
value=lambda api, ch: api.ai_sensitivity(ch, "vehicle"),
|
||||
method=lambda api, ch, value: api.set_ai_sensitivity(ch, int(value), "vehicle"),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="ai_non_motor_vehicle_sensitivity",
|
||||
cmd_key="GetAiAlarm",
|
||||
translation_key="ai_non_motor_vehicle_sensitivity",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=1,
|
||||
native_min_value=0,
|
||||
native_max_value=100,
|
||||
supported=lambda api, ch: (
|
||||
api.supported(ch, "ai_sensitivity")
|
||||
and api.supported(ch, "ai_non-motor vehicle")
|
||||
),
|
||||
value=lambda api, ch: api.ai_sensitivity(ch, "non-motor vehicle"),
|
||||
method=lambda api, ch, value: (
|
||||
api.set_ai_sensitivity(ch, int(value), "non-motor vehicle")
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="ai_package_sensititvity",
|
||||
cmd_key="GetAiAlarm",
|
||||
@@ -345,6 +414,25 @@ NUMBER_ENTITIES = (
|
||||
value=lambda api, ch: api.ai_delay(ch, "people"),
|
||||
method=lambda api, ch, value: api.set_ai_delay(ch, int(value), "people"),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="ai_non_motor_vehicle_delay",
|
||||
cmd_key="GetAiAlarm",
|
||||
translation_key="ai_non_motor_vehicle_delay",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
entity_registry_enabled_default=False,
|
||||
native_step=1,
|
||||
native_unit_of_measurement=UnitOfTime.SECONDS,
|
||||
native_min_value=0,
|
||||
native_max_value=8,
|
||||
supported=lambda api, ch: (
|
||||
api.supported(ch, "ai_delay") and api.supported(ch, "ai_non-motor vehicle")
|
||||
),
|
||||
value=lambda api, ch: api.ai_delay(ch, "non-motor vehicle"),
|
||||
method=lambda api, ch, value: (
|
||||
api.set_ai_delay(ch, int(value), "non-motor vehicle")
|
||||
),
|
||||
),
|
||||
ReolinkNumberEntityDescription(
|
||||
key="ai_vehicle_delay",
|
||||
cmd_key="GetAiAlarm",
|
||||
|
@@ -16,6 +16,7 @@ from reolink_aio.api import (
|
||||
HDREnum,
|
||||
Host,
|
||||
HubToneEnum,
|
||||
SpotlightEventModeEnum,
|
||||
SpotlightModeEnum,
|
||||
StatusLedEnum,
|
||||
TrackMethodEnum,
|
||||
@@ -86,6 +87,7 @@ SELECT_ENTITIES = (
|
||||
ReolinkSelectEntityDescription(
|
||||
key="floodlight_mode",
|
||||
cmd_key="GetWhiteLed",
|
||||
cmd_id=[289, 438],
|
||||
translation_key="floodlight_mode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
get_options=lambda api, ch: api.whiteled_mode_list(ch),
|
||||
@@ -93,6 +95,21 @@ SELECT_ENTITIES = (
|
||||
value=lambda api, ch: SpotlightModeEnum(api.whiteled_mode(ch)).name,
|
||||
method=lambda api, ch, name: api.set_whiteled(ch, mode=name),
|
||||
),
|
||||
ReolinkSelectEntityDescription(
|
||||
key="floodlight_event_mode",
|
||||
cmd_key="GetWhiteLed",
|
||||
cmd_id=[289, 438],
|
||||
translation_key="floodlight_event_mode",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
get_options=[mode.name for mode in SpotlightEventModeEnum],
|
||||
supported=lambda api, ch: api.supported(ch, "floodlight_event"),
|
||||
value=lambda api, ch: SpotlightEventModeEnum(api.whiteled_event_mode(ch)).name,
|
||||
method=lambda api, ch, name: (
|
||||
api.baichuan.set_floodlight(
|
||||
ch, event_mode=SpotlightEventModeEnum[name].value
|
||||
)
|
||||
),
|
||||
),
|
||||
ReolinkSelectEntityDescription(
|
||||
key="day_night_mode",
|
||||
cmd_key="GetIsp",
|
||||
|
@@ -8,6 +8,7 @@ from datetime import date, datetime
|
||||
from decimal import Decimal
|
||||
|
||||
from reolink_aio.api import Host
|
||||
from reolink_aio.const import YOLO_DETECT_TYPES
|
||||
from reolink_aio.enums import BatteryEnum
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -135,6 +136,39 @@ SENSORS = (
|
||||
value=lambda api, ch: api.wifi_signal(ch),
|
||||
supported=lambda api, ch: api.supported(ch, "wifi"),
|
||||
),
|
||||
ReolinkSensorEntityDescription(
|
||||
key="person_type",
|
||||
cmd_id=696,
|
||||
translation_key="person_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=YOLO_DETECT_TYPES["people"],
|
||||
value=lambda api, ch: api.baichuan.ai_detect_type(ch, "person"),
|
||||
supported=lambda api, ch: (
|
||||
api.supported(ch, "ai_yolo_type") and api.supported(ch, "ai_people")
|
||||
),
|
||||
),
|
||||
ReolinkSensorEntityDescription(
|
||||
key="vehicle_type",
|
||||
cmd_id=696,
|
||||
translation_key="vehicle_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=YOLO_DETECT_TYPES["vehicle"],
|
||||
value=lambda api, ch: api.baichuan.ai_detect_type(ch, "vehicle"),
|
||||
supported=lambda api, ch: (
|
||||
api.supported(ch, "ai_yolo_type") and api.supported(ch, "ai_vehicle")
|
||||
),
|
||||
),
|
||||
ReolinkSensorEntityDescription(
|
||||
key="animal_type",
|
||||
cmd_id=696,
|
||||
translation_key="animal_type",
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=YOLO_DETECT_TYPES["dog_cat"],
|
||||
value=lambda api, ch: api.baichuan.ai_detect_type(ch, "dog_cat"),
|
||||
supported=lambda api, ch: (
|
||||
api.supported(ch, "ai_yolo_type") and api.supported(ch, "ai_dog_cat")
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
HOST_SENSORS = (
|
||||
|
@@ -43,6 +43,7 @@ class ReolinkHostSirenEntityDescription(
|
||||
SIREN_ENTITIES = (
|
||||
ReolinkSirenEntityDescription(
|
||||
key="siren",
|
||||
cmd_id=547,
|
||||
translation_key="siren",
|
||||
supported=lambda api, ch: api.supported(ch, "siren_play"),
|
||||
),
|
||||
@@ -100,6 +101,11 @@ class ReolinkSirenEntity(ReolinkChannelCoordinatorEntity, SirenEntity):
|
||||
self.entity_description = entity_description
|
||||
super().__init__(reolink_data, channel)
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool | None:
|
||||
"""State of the siren."""
|
||||
return self._host.api.baichuan.siren_state(self._channel)
|
||||
|
||||
@raise_translated_error
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn on the siren."""
|
||||
|
@@ -206,6 +206,13 @@
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
},
|
||||
"non-motor_vehicle": {
|
||||
"name": "Bicycle",
|
||||
"state": {
|
||||
"off": "[%key:component::binary_sensor::entity_component::gas::state::off%]",
|
||||
"on": "[%key:component::binary_sensor::entity_component::gas::state::on%]"
|
||||
}
|
||||
},
|
||||
"pet": {
|
||||
"name": "Pet",
|
||||
"state": {
|
||||
@@ -535,9 +542,18 @@
|
||||
"floodlight_brightness": {
|
||||
"name": "Floodlight turn on brightness"
|
||||
},
|
||||
"floodlight_event_brightness": {
|
||||
"name": "Floodlight event brightness"
|
||||
},
|
||||
"ir_brightness": {
|
||||
"name": "Infrared light brightness"
|
||||
},
|
||||
"floodlight_event_on_time": {
|
||||
"name": "Floodlight event on time"
|
||||
},
|
||||
"floodlight_event_flash_time": {
|
||||
"name": "Floodlight event flash time"
|
||||
},
|
||||
"volume": {
|
||||
"name": "Volume"
|
||||
},
|
||||
@@ -571,6 +587,9 @@
|
||||
"ai_vehicle_sensitivity": {
|
||||
"name": "AI vehicle sensitivity"
|
||||
},
|
||||
"ai_non_motor_vehicle_sensitivity": {
|
||||
"name": "AI bicycle sensitivity"
|
||||
},
|
||||
"ai_package_sensitivity": {
|
||||
"name": "AI package sensitivity"
|
||||
},
|
||||
@@ -607,6 +626,9 @@
|
||||
"ai_vehicle_delay": {
|
||||
"name": "AI vehicle delay"
|
||||
},
|
||||
"ai_non_motor_vehicle_delay": {
|
||||
"name": "AI bicycle delay"
|
||||
},
|
||||
"ai_package_delay": {
|
||||
"name": "AI package delay"
|
||||
},
|
||||
@@ -683,6 +705,14 @@
|
||||
"autoadaptive": "Auto adaptive"
|
||||
}
|
||||
},
|
||||
"floodlight_event_mode": {
|
||||
"name": "Floodlight event mode",
|
||||
"state": {
|
||||
"off": "[%key:common::state::off%]",
|
||||
"on": "[%key:common::state::on%]",
|
||||
"flash": "Flash"
|
||||
}
|
||||
},
|
||||
"day_night_mode": {
|
||||
"name": "Day night mode",
|
||||
"state": {
|
||||
@@ -923,6 +953,29 @@
|
||||
},
|
||||
"sd_storage": {
|
||||
"name": "SD {hdd_index} storage"
|
||||
},
|
||||
"person_type": {
|
||||
"name": "Person type",
|
||||
"state": {
|
||||
"man": "Man",
|
||||
"woman": "Woman"
|
||||
}
|
||||
},
|
||||
"vehicle_type": {
|
||||
"name": "Vehicle type",
|
||||
"state": {
|
||||
"sedan": "Sedan",
|
||||
"suv": "SUV",
|
||||
"pickup_truck": "Pickup truck",
|
||||
"motorcycle": "Motorcycle"
|
||||
}
|
||||
},
|
||||
"animal_type": {
|
||||
"name": "Animal type",
|
||||
"state": {
|
||||
"dog": "Dog",
|
||||
"cat": "Cat"
|
||||
}
|
||||
}
|
||||
},
|
||||
"siren": {
@@ -66,6 +66,16 @@ class IRobotEntity(Entity):
"""Return the battery stats."""
return self.vacuum_state.get("bbchg3", {})

@property
def tank_level(self) -> int | None:
"""Return the tank level."""
return self.vacuum_state.get("tankLvl")

@property
def dock_tank_level(self) -> int | None:
"""Return the dock tank level."""
return self.vacuum_state.get("dock", {}).get("tankLvl")

@property
def last_mission(self):
"""Return last mission start time."""

@@ -35,6 +35,12 @@
},
"last_mission": {
"default": "mdi:calendar-clock"
},
"tank_level": {
"default": "mdi:water"
},
"dock_tank_level": {
"default": "mdi:water"
}
}
}

@@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType

from .const import DOMAIN
from .entity import IRobotEntity
from .entity import IRobotEntity, roomba_reported_state
from .models import RoombaData


@@ -29,6 +29,16 @@ class RoombaSensorEntityDescription(SensorEntityDescription):
value_fn: Callable[[IRobotEntity], StateType]


DOCK_SENSORS: list[RoombaSensorEntityDescription] = [
RoombaSensorEntityDescription(
key="dock_tank_level",
translation_key="dock_tank_level",
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda self: self.dock_tank_level,
),
]

SENSORS: list[RoombaSensorEntityDescription] = [
RoombaSensorEntityDescription(
key="battery",
@@ -37,6 +47,13 @@ SENSORS: list[RoombaSensorEntityDescription] = [
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda self: self.vacuum_state.get("batPct"),
),
RoombaSensorEntityDescription(
key="tank_level",
translation_key="tank_level",
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda self: self.tank_level,
),
RoombaSensorEntityDescription(
key="battery_cycles",
translation_key="battery_cycles",
@@ -132,8 +149,16 @@ async def async_setup_entry(
roomba = domain_data.roomba
blid = domain_data.blid

sensor_list: list[RoombaSensorEntityDescription] = SENSORS

has_dock: bool = len(roomba_reported_state(roomba).get("dock", {})) > 0

if has_dock:
sensor_list.extend(DOCK_SENSORS)

async_add_entities(
RoombaSensor(roomba, blid, entity_description) for entity_description in SENSORS
RoombaSensor(roomba, blid, entity_description)
for entity_description in sensor_list
)
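For context, a rough sketch of the dock gating above, using a made-up reported state (only the "dock", "tankLvl" and "batPct" keys come from the diff; everything else is illustrative):

# Hypothetical Roomba/Braava reported state; real payloads contain many more fields.
reported_state = {
    "batPct": 87,
    "tankLvl": 65,
    "dock": {"tankLvl": 40},
}

# Mirrors the setup logic: dock sensors are only created when the reported
# "dock" object is non-empty.
has_dock = len(reported_state.get("dock", {})) > 0
sensor_keys = ["battery", "tank_level"] + (["dock_tank_level"] if has_dock else [])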
@@ -90,6 +90,12 @@
},
"last_mission": {
"name": "Last mission start time"
},
"tank_level": {
"name": "Tank level"
},
"dock_tank_level": {
"name": "Dock tank level"
}
}
}
@@ -403,11 +403,16 @@ class BraavaJet(IRobotVacuum):
detected_pad = state.get("detectedPad")
mop_ready = state.get("mopReady", {})
lid_closed = mop_ready.get("lidClosed")
tank_present = mop_ready.get("tankPresent")
tank_present = mop_ready.get("tankPresent") or state.get("tankPresent")
tank_level = state.get("tankLvl")
state_attrs[ATTR_DETECTED_PAD] = detected_pad
state_attrs[ATTR_LID_CLOSED] = lid_closed
state_attrs[ATTR_TANK_PRESENT] = tank_present
state_attrs[ATTR_TANK_LEVEL] = tank_level
bin_raw_state = state.get("bin", {})
if bin_raw_state.get("present") is not None:
state_attrs[ATTR_BIN_PRESENT] = bin_raw_state.get("present")
if bin_raw_state.get("full") is not None:
state_attrs[ATTR_BIN_FULL] = bin_raw_state.get("full")

return state_attrs
@@ -197,6 +197,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> boo
def _close(*_):
controller.close()

entry.async_on_unload(entry.add_update_listener(update_listener))
entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close))

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -239,3 +240,8 @@ async def async_unload_entry(hass: HomeAssistant, entry: SatelConfigEntry) -> bo
controller.close()

return unload_ok


async def update_listener(hass: HomeAssistant, entry: SatelConfigEntry) -> None:
"""Handle options update."""
hass.config_entries.async_schedule_reload(entry.entry_id)
@@ -171,6 +171,7 @@
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",
@@ -178,6 +179,7 @@
"precipitation": "[%key:component::sensor::entity_component::precipitation::name%]",
"precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]",
"pressure": "[%key:component::sensor::entity_component::pressure::name%]",
"reactive_energy": "[%key:component::sensor::entity_component::reactive_energy::name%]",
"reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]",
"signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]",
"sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]",
@@ -51,7 +51,6 @@ from homeassistant.util.unit_conversion import (
AreaConverter,
BaseUnitConverter,
BloodGlucoseConcentrationConverter,
CarbonMonoxideConcentrationConverter,
ConductivityConverter,
DataRateConverter,
DistanceConverter,
@@ -157,7 +156,7 @@ class SensorDeviceClass(StrEnum):
CO = "carbon_monoxide"
"""Carbon Monoxide gas concentration.

Unit of measurement: `ppm` (parts per million), `mg/m³`
Unit of measurement: `ppm` (parts per million)
"""

CO2 = "carbon_dioxide"
@@ -544,7 +543,6 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
SensorDeviceClass.AREA: AreaConverter,
SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter,
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlucoseConcentrationConverter,
SensorDeviceClass.CO: CarbonMonoxideConcentrationConverter,
SensorDeviceClass.CONDUCTIVITY: ConductivityConverter,
SensorDeviceClass.CURRENT: ElectricCurrentConverter,
SensorDeviceClass.DATA_RATE: DataRateConverter,
@@ -586,10 +584,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
SensorDeviceClass.BATTERY: {PERCENTAGE},
SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
SensorDeviceClass.CO: {
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
SensorDeviceClass.CURRENT: set(UnitOfElectricCurrent),
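For illustration, a minimal sketch of a CO sensor that still validates after this change (ppm is now the only accepted unit for this device class; the entity and its reading are made up):

from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.const import CONCENTRATION_PARTS_PER_MILLION


class ExampleCOSensor(SensorEntity):
    """Hypothetical carbon monoxide sensor reporting in ppm."""

    _attr_device_class = SensorDeviceClass.CO
    _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION
    _attr_native_value = 9  # made-up reading; mg/m³ is no longer a valid unit here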
@@ -6,14 +6,9 @@ from datetime import date, datetime
import logging

from homeassistant.core import callback
from homeassistant.helpers.selector import (
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from homeassistant.util import dt as dt_util

from . import DOMAIN, SensorDeviceClass, SensorStateClass
from . import SensorDeviceClass

_LOGGER = logging.getLogger(__name__)

@@ -42,31 +37,3 @@ def async_parse_date_datetime(

_LOGGER.warning("%s rendered invalid date %s", entity_id, value)
return None


@callback
def create_sensor_device_class_select_selector() -> SelectSelector:
"""Create sensor device class select selector."""
return SelectSelector(
SelectSelectorConfig(
options=[device_class.value for device_class in SensorDeviceClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key="device_class",
translation_domain=DOMAIN,
sort=True,
)
)


@callback
def create_sensor_state_class_select_selector() -> SelectSelector:
"""Create sensor state class select selector."""
return SelectSelector(
SelectSelectorConfig(
options=[device_class.value for device_class in SensorStateClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key="state_class",
translation_domain=DOMAIN,
sort=True,
)
)
@@ -245,6 +245,9 @@
"pm1": {
"name": "PM1"
},
"pm4": {
"name": "PM4"
},
"pm10": {
"name": "PM10"
},
@@ -334,76 +337,5 @@
"title": "The unit of {statistic_id} has changed",
"description": ""
}
},
"selector": {
"device_class": {
"options": {
"absolute_humidity": "[%key:component::sensor::entity_component::absolute_humidity::name%]",
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
"area": "[%key:component::sensor::entity_component::area::name%]",
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
"battery": "[%key:component::sensor::entity_component::battery::name%]",
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
"current": "[%key:component::sensor::entity_component::current::name%]",
"data_rate": "[%key:component::sensor::entity_component::data_rate::name%]",
"data_size": "[%key:component::sensor::entity_component::data_size::name%]",
"date": "[%key:component::sensor::entity_component::date::name%]",
"distance": "[%key:component::sensor::entity_component::distance::name%]",
"duration": "[%key:component::sensor::entity_component::duration::name%]",
"energy": "[%key:component::sensor::entity_component::energy::name%]",
"energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]",
"energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]",
"enum": "Enumeration",
"frequency": "[%key:component::sensor::entity_component::frequency::name%]",
"gas": "[%key:component::sensor::entity_component::gas::name%]",
"humidity": "[%key:component::sensor::entity_component::humidity::name%]",
"illuminance": "[%key:component::sensor::entity_component::illuminance::name%]",
"irradiance": "[%key:component::sensor::entity_component::irradiance::name%]",
"moisture": "[%key:component::sensor::entity_component::moisture::name%]",
"monetary": "[%key:component::sensor::entity_component::monetary::name%]",
"nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
"nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]",
"nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]",
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",
"power_factor": "[%key:component::sensor::entity_component::power_factor::name%]",
"precipitation": "[%key:component::sensor::entity_component::precipitation::name%]",
"precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]",
"pressure": "[%key:component::sensor::entity_component::pressure::name%]",
"reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]",
"signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]",
"sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]",
"speed": "[%key:component::sensor::entity_component::speed::name%]",
"sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]",
"temperature": "[%key:component::sensor::entity_component::temperature::name%]",
"timestamp": "[%key:component::sensor::entity_component::timestamp::name%]",
"volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]",
"volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]",
"voltage": "[%key:component::sensor::entity_component::voltage::name%]",
"volume": "[%key:component::sensor::entity_component::volume::name%]",
"volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]",
"volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]",
"water": "[%key:component::sensor::entity_component::water::name%]",
"weight": "[%key:component::sensor::entity_component::weight::name%]",
"wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]",
"wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]"
}
},
"state_class": {
"options": {
"measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]",
"measurement_angle": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement_angle%]",
"total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]",
"total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]"
}
}
}
}
@@ -20,6 +20,9 @@
}
},
"sensor": {
"charger_state": {
"default": "mdi:ev-station"
},
"detected_objects": {
"default": "mdi:account-group"
},
@@ -33,6 +33,7 @@ from homeassistant.const import (
UnitOfPower,
UnitOfPressure,
UnitOfTemperature,
UnitOfTime,
UnitOfVolume,
UnitOfVolumeFlowRate,
)
@@ -121,6 +122,23 @@ class RpcSensor(ShellyRpcAttributeEntity, SensorEntity):
return self.option_map[attribute_value]


class RpcConsumedEnergySensor(RpcSensor):
"""Represent a RPC sensor."""

@property
def native_value(self) -> StateType:
"""Return value of sensor."""
total_energy = self.status["aenergy"]["total"]

if not isinstance(total_energy, float):
return None

if not isinstance(self.attribute_value, float):
return None

return total_energy - self.attribute_value
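A quick worked example of the subtraction above, with made-up status values (only the key names come from the code):

status = {"aenergy": {"total": 1500.0}, "ret_aenergy": {"total": 200.0}}

total = status["aenergy"]["total"]         # 1500.0 Wh delivered in total
returned = status["ret_aenergy"]["total"]  # 200.0 Wh returned to the grid
consumed = total - returned                # 1300.0 Wh reported by the consumed energy sensor

If either value is missing or not a float, the sensor reports None instead.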
class RpcPresenceSensor(RpcSensor):
"""Represent a RPC presence sensor."""

@@ -884,7 +902,7 @@ RPC_SENSORS: Final = {
"energy": RpcSensorDescription(
key="switch",
sub_key="aenergy",
name="Energy",
name="Total energy",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value=lambda status, _: status["total"],
@@ -902,7 +920,22 @@ RPC_SENSORS: Final = {
suggested_display_precision=2,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
removal_condition=lambda _config, status, key: (
status[key].get("ret_aenergy") is None
),
),
"consumed_energy_switch": RpcSensorDescription(
key="switch",
sub_key="ret_aenergy",
name="Consumed energy",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value=lambda status, _: status["total"],
suggested_display_precision=2,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
entity_class=RpcConsumedEnergySensor,
removal_condition=lambda _config, status, key: (
status[key].get("ret_aenergy") is None
),
@@ -921,7 +954,7 @@ RPC_SENSORS: Final = {
"energy_pm1": RpcSensorDescription(
key="pm1",
sub_key="aenergy",
name="Energy",
name="Total energy",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value=lambda status, _: status["total"],
@@ -932,7 +965,18 @@ RPC_SENSORS: Final = {
"ret_energy_pm1": RpcSensorDescription(
key="pm1",
sub_key="ret_aenergy",
name="Total active returned energy",
name="Returned energy",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value=lambda status, _: status["total"],
suggested_display_precision=2,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
"consumed_energy_pm1": RpcSensorDescription(
key="pm1",
sub_key="ret_aenergy",
name="Consumed energy",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
value=lambda status, _: status["total"],
@@ -940,6 +984,7 @@ RPC_SENSORS: Final = {
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
entity_class=RpcConsumedEnergySensor,
),
"energy_cct": RpcSensorDescription(
key="cct",
@@ -1489,6 +1534,41 @@ RPC_SENSORS: Final = {
state_class=SensorStateClass.MEASUREMENT,
role="water_temperature",
),
"number_work_state": RpcSensorDescription(
key="number",
sub_key="value",
translation_key="charger_state",
device_class=SensorDeviceClass.ENUM,
options=[
"charger_charging",
"charger_end",
"charger_fault",
"charger_free",
"charger_free_fault",
"charger_insert",
"charger_pause",
"charger_wait",
],
role="work_state",
),
"number_energy_charge": RpcSensorDescription(
key="number",
sub_key="value",
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
suggested_display_precision=2,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL,
role="energy_charge",
),
"number_time_charge": RpcSensorDescription(
key="number",
sub_key="value",
native_unit_of_measurement=UnitOfTime.MINUTES,
suggested_display_precision=0,
device_class=SensorDeviceClass.DURATION,
role="time_charge",
),
"presence_num_objects": RpcSensorDescription(
key="presence",
sub_key="num_objects",
@@ -141,6 +141,18 @@
}
},
"sensor": {
"charger_state": {
"state": {
"charger_charging": "[%key:common::state::charging%]",
"charger_end": "Charge completed",
"charger_fault": "Error while charging",
"charger_free": "[%key:component::binary_sensor::entity_component::plug::state::off%]",
"charger_free_fault": "Can not release plug",
"charger_insert": "[%key:component::binary_sensor::entity_component::plug::state::on%]",
"charger_pause": "Charging paused by charger",
"charger_wait": "Charging paused by vehicle"
}
},
"detected_objects": {
"unit_of_measurement": "objects"
},
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/smhi",
"iot_class": "cloud_polling",
"loggers": ["pysmhi"],
"requirements": ["pysmhi==1.0.2"]
"requirements": ["pysmhi==1.1.0"]
}
@@ -19,6 +19,7 @@ _LOGGER = logging.getLogger(__name__)

PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.EVENT,
Platform.SELECT,
Platform.SENSOR,
69
homeassistant/components/snoo/button.py
Normal file
@@ -0,0 +1,69 @@
"""Support for Snoo Buttons."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass

from python_snoo.containers import SnooDevice
from python_snoo.exceptions import SnooCommandException
from python_snoo.snoo import Snoo

from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DOMAIN
from .coordinator import SnooConfigEntry
from .entity import SnooDescriptionEntity


@dataclass(kw_only=True, frozen=True)
class SnooButtonEntityDescription(ButtonEntityDescription):
"""Description for Snoo button entities."""

press_fn: Callable[[Snoo, SnooDevice], Awaitable[None]]


BUTTON_DESCRIPTIONS: list[SnooButtonEntityDescription] = [
SnooButtonEntityDescription(
key="start_snoo",
translation_key="start_snoo",
press_fn=lambda snoo, device: snoo.start_snoo(
device,
),
),
]


async def async_setup_entry(
hass: HomeAssistant,
entry: SnooConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up buttons for Snoo device."""
coordinators = entry.runtime_data
async_add_entities(
SnooButton(coordinator, description)
for coordinator in coordinators.values()
for description in BUTTON_DESCRIPTIONS
)


class SnooButton(SnooDescriptionEntity, ButtonEntity):
"""Representation of a Snoo button."""

entity_description: SnooButtonEntityDescription

async def async_press(self) -> None:
"""Handle the button press."""
try:
await self.entity_description.press_fn(
self.coordinator.snoo,
self.coordinator.device,
)
except SnooCommandException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key=f"{self.entity_description.key}_failed",
translation_placeholders={"name": str(self.name)},
) from err
9
homeassistant/components/snoo/icons.json
Normal file
@@ -0,0 +1,9 @@
{
"entity": {
"button": {
"start_snoo": {
"default": "mdi:play"
}
}
}
}
@@ -25,6 +25,9 @@
"select_failed": {
"message": "Error while updating {name} to {option}"
},
"start_snoo_failed": {
"message": "Starting {name} failed"
},
"switch_on_failed": {
"message": "Turning {name} on failed"
},
@@ -41,6 +44,11 @@
"name": "Right safety clip"
}
},
"button": {
"start_snoo": {
"name": "Start"
}
},
"event": {
"event": {
"name": "Snoo event",
@@ -59,17 +59,12 @@ async def async_setup_entry(
for select_data in SELECT_TYPES:
if select_data.speaker_model == speaker.model_name.upper():
if (
state := getattr(speaker.soco, select_data.soco_attribute, None)
) is not None:
try:
setattr(speaker, select_data.speaker_attribute, int(state))
features.append(select_data)
except ValueError:
_LOGGER.error(
"Invalid value for %s %s",
select_data.speaker_attribute,
state,
)
speaker.update_soco_int_attribute(
select_data.soco_attribute, select_data.speaker_attribute
)
is not None
):
features.append(select_data)
return features

async def _async_create_entities(speaker: SonosSpeaker) -> None:
@@ -112,8 +107,9 @@ class SonosSelectEntity(SonosEntity, SelectEntity):
@soco_error()
def poll_state(self) -> None:
"""Poll the device for the current state."""
state = getattr(self.soco, self.soco_attribute)
setattr(self.speaker, self.speaker_attribute, state)
self.speaker.update_soco_int_attribute(
self.soco_attribute, self.speaker_attribute
)

@property
def current_option(self) -> str | None:
@@ -275,6 +275,29 @@ class SonosSpeaker:
"""Write states for associated SonosEntity instances."""
async_dispatcher_send(self.hass, f"{SONOS_STATE_UPDATED}-{self.soco.uid}")

def update_soco_int_attribute(
self, soco_attribute: str, speaker_attribute: str
) -> int | None:
"""Update an integer attribute from SoCo and set it on the speaker.

Returns the integer value if successful, otherwise None. Do not call from
async context as it is a blocking function.
"""
value: int | None = None
if (state := getattr(self.soco, soco_attribute, None)) is None:
_LOGGER.error("Missing value for %s", speaker_attribute)
else:
try:
value = int(state)
except (TypeError, ValueError):
_LOGGER.error(
"Invalid value for %s %s",
speaker_attribute,
state,
)
setattr(self, speaker_attribute, value)
return value

#
# Properties
#
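Because the new helper does blocking SoCo I/O, async callers would have to push it to the executor; a minimal sketch with illustrative attribute names (the real pairs come from SELECT_TYPES):

# "sub_enabled" here is only a placeholder attribute name for the sketch.
value = await hass.async_add_executor_job(
    speaker.update_soco_int_attribute, "sub_enabled", "sub_enabled"
)
if value is None:
    # Attribute missing on the device or not coercible to int; the speaker
    # attribute has been set to None and an error was logged.
    pass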
@@ -5,7 +5,6 @@ from __future__ import annotations
import logging
from typing import Any

import sqlparse
import voluptuous as vol

from homeassistant.components.recorder import CONF_DB_URL, get_instance
@@ -40,23 +39,11 @@ from .const import (
DOMAIN,
PLATFORMS,
)
from .util import redact_credentials
from .util import redact_credentials, validate_sql_select

_LOGGER = logging.getLogger(__name__)


def validate_sql_select(value: str) -> str:
"""Validate that value is a SQL SELECT query."""
if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
raise vol.Invalid("Multiple SQL queries are not supported")
if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
raise vol.Invalid("Invalid SQL query")
if query_type != "SELECT":
_LOGGER.debug("The SQL query %s is of type %s", query, query_type)
raise vol.Invalid("Only SELECT queries allowed")
return str(query[0])


QUERY_SCHEMA = vol.Schema(
{
vol.Required(CONF_COLUMN_NAME): cv.string,
@@ -7,19 +7,11 @@ import decimal
import logging
from typing import Any

import sqlalchemy
from sqlalchemy import lambda_stmt
from sqlalchemy.engine import Result
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session, scoped_session, sessionmaker
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.util import LRUCache
from sqlalchemy.orm import scoped_session

from homeassistant.components.recorder import (
CONF_DB_URL,
SupportedDialect,
get_instance,
)
from homeassistant.components.recorder import CONF_DB_URL, get_instance
from homeassistant.components.sensor import CONF_STATE_CLASS
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -29,12 +21,10 @@ from homeassistant.const import (
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
EVENT_HOMEASSISTANT_STOP,
MATCH_ALL,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
@@ -50,13 +40,16 @@ from homeassistant.helpers.trigger_template_entity import (
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import CONF_ADVANCED_OPTIONS, CONF_COLUMN_NAME, CONF_QUERY, DOMAIN
from .models import SQLData
from .util import redact_credentials, resolve_db_url
from .util import (
async_create_sessionmaker,
generate_lambda_stmt,
redact_credentials,
resolve_db_url,
validate_query,
)

_LOGGER = logging.getLogger(__name__)

_SQL_LAMBDA_CACHE: LRUCache = LRUCache(1000)

TRIGGER_ENTITY_OPTIONS = (
CONF_AVAILABILITY,
CONF_DEVICE_CLASS,
@@ -145,36 +138,6 @@ async def async_setup_entry(
)


@callback
def _async_get_or_init_domain_data(hass: HomeAssistant) -> SQLData:
"""Get or initialize domain data."""
if DOMAIN in hass.data:
sql_data: SQLData = hass.data[DOMAIN]
return sql_data

session_makers_by_db_url: dict[str, scoped_session] = {}

#
# Ensure we dispose of all engines at shutdown
# to avoid unclean disconnects
#
# Shutdown all sessions in the executor since they will
# do blocking I/O
#
def _shutdown_db_engines(event: Event) -> None:
"""Shutdown all database engines."""
for sessmaker in session_makers_by_db_url.values():
sessmaker.connection().engine.dispose()

cancel_shutdown = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, _shutdown_db_engines
)

sql_data = SQLData(cancel_shutdown, session_makers_by_db_url)
hass.data[DOMAIN] = sql_data
return sql_data


async def async_setup_sensor(
hass: HomeAssistant,
trigger_entity_config: ConfigType,
@@ -187,70 +150,16 @@ async def async_setup_sensor(
async_add_entities: AddEntitiesCallback | AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the SQL sensor."""
try:
instance = get_instance(hass)
except KeyError: # No recorder loaded
uses_recorder_db = False
else:
uses_recorder_db = db_url == instance.db_url
sessmaker: scoped_session | None
sql_data = _async_get_or_init_domain_data(hass)
use_database_executor = False
if uses_recorder_db and instance.dialect_name == SupportedDialect.SQLITE:
use_database_executor = True
assert instance.engine is not None
sessmaker = scoped_session(sessionmaker(bind=instance.engine, future=True))
# For other databases we need to create a new engine since
# we want the connection to use the default timezone and these
# database engines will use QueuePool as its only sqlite that
# needs our custom pool. If there is already a session maker
# for this db_url we can use that so we do not create a new engine
# for every sensor.
elif db_url in sql_data.session_makers_by_db_url:
sessmaker = sql_data.session_makers_by_db_url[db_url]
elif sessmaker := await hass.async_add_executor_job(
_validate_and_get_session_maker_for_db_url, db_url
):
sql_data.session_makers_by_db_url[db_url] = sessmaker
else:
(
sessmaker,
uses_recorder_db,
use_database_executor,
) = await async_create_sessionmaker(hass, db_url)
if sessmaker is None:
return
validate_query(hass, query_str, uses_recorder_db, unique_id)

upper_query = query_str.upper()
if uses_recorder_db:
redacted_query = redact_credentials(query_str)

issue_key = unique_id if unique_id else redacted_query
# If the query has a unique id and they fix it we can dismiss the issue
# but if it doesn't have a unique id they have to ignore it instead

if (
"ENTITY_ID," in upper_query or "ENTITY_ID " in upper_query
) and "STATES_META" not in upper_query:
_LOGGER.error(
"The query `%s` contains the keyword `entity_id` but does not "
"reference the `states_meta` table. This will cause a full table "
"scan and database instability. Please check the documentation and use "
"`states_meta.entity_id` instead",
redacted_query,
)

ir.async_create_issue(
hass,
DOMAIN,
f"entity_id_query_does_full_table_scan_{issue_key}",
translation_key="entity_id_query_does_full_table_scan",
translation_placeholders={"query": redacted_query},
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
)
raise ValueError(
"Query contains entity_id but does not reference states_meta"
)

ir.async_delete_issue(
hass, DOMAIN, f"entity_id_query_does_full_table_scan_{issue_key}"
)

# MSSQL uses TOP and not LIMIT
if not ("LIMIT" in upper_query or "SELECT TOP" in upper_query):
if "mssql" in db_url:
@@ -273,39 +182,6 @@ async def async_setup_sensor(
)


def _validate_and_get_session_maker_for_db_url(db_url: str) -> scoped_session | None:
"""Validate the db_url and return a session maker.

This does I/O and should be run in the executor.
"""
sess: Session | None = None
try:
engine = sqlalchemy.create_engine(db_url, future=True)
sessmaker = scoped_session(sessionmaker(bind=engine, future=True))
# Run a dummy query just to test the db_url
sess = sessmaker()
sess.execute(sqlalchemy.text("SELECT 1;"))

except SQLAlchemyError as err:
_LOGGER.error(
"Couldn't connect using %s DB_URL: %s",
redact_credentials(db_url),
redact_credentials(str(err)),
)
return None
else:
return sessmaker
finally:
if sess:
sess.close()


def _generate_lambda_stmt(query: str) -> StatementLambdaElement:
"""Generate the lambda statement."""
text = sqlalchemy.text(query)
return lambda_stmt(lambda: text, lambda_cache=_SQL_LAMBDA_CACHE)


class SQLSensor(ManualTriggerSensorEntity):
"""Representation of an SQL sensor."""

@@ -329,7 +205,7 @@ class SQLSensor(ManualTriggerSensorEntity):
self.sessionmaker = sessmaker
self._attr_extra_state_attributes = {}
self._use_database_executor = use_database_executor
self._lambda_stmt = _generate_lambda_stmt(query)
self._lambda_stmt = generate_lambda_stmt(query)
if not yaml and (unique_id := trigger_entity_config.get(CONF_UNIQUE_ID)):
self._attr_name = None
self._attr_has_entity_name = True
@@ -125,6 +125,7 @@
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",
@@ -4,13 +4,27 @@ from __future__ import annotations

import logging

from homeassistant.components.recorder import get_instance
from homeassistant.core import HomeAssistant
import sqlalchemy
from sqlalchemy import lambda_stmt
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session, scoped_session, sessionmaker
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.util import LRUCache
import sqlparse
import voluptuous as vol

from .const import DB_URL_RE
from homeassistant.components.recorder import SupportedDialect, get_instance
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir

from .const import DB_URL_RE, DOMAIN
from .models import SQLData

_LOGGER = logging.getLogger(__name__)

_SQL_LAMBDA_CACHE: LRUCache = LRUCache(1000)


def redact_credentials(data: str | None) -> str:
"""Redact credentials from string data."""
@@ -25,3 +39,187 @@ def resolve_db_url(hass: HomeAssistant, db_url: str | None) -> str:
if db_url and not db_url.isspace():
return db_url
return get_instance(hass).db_url


def validate_sql_select(value: str) -> str:
"""Validate that value is a SQL SELECT query."""
if len(query := sqlparse.parse(value.lstrip().lstrip(";"))) > 1:
raise vol.Invalid("Multiple SQL queries are not supported")
if len(query) == 0 or (query_type := query[0].get_type()) == "UNKNOWN":
raise vol.Invalid("Invalid SQL query")
if query_type != "SELECT":
_LOGGER.debug("The SQL query %s is of type %s", query, query_type)
raise vol.Invalid("Only SELECT queries allowed")
return str(query[0])
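For illustration, how the validator above behaves on a few example inputs (each call shown independently):

validate_sql_select("SELECT 42")            # returns "SELECT 42"
validate_sql_select("SELECT 1; SELECT 2")   # raises vol.Invalid: multiple queries not supported
validate_sql_select("DELETE FROM states")   # raises vol.Invalid: only SELECT queries allowed
validate_sql_select("not really sql")       # raises vol.Invalid: invalid SQL query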
async def async_create_sessionmaker(
hass: HomeAssistant, db_url: str
) -> tuple[scoped_session | None, bool, bool]:
"""Create a session maker for the given db_url.

This function gets or creates a SQLAlchemy `scoped_session` for the given
db_url. It reuses existing connections where possible and handles the special
case for the default recorder's database to use the correct executor.

Args:
hass: The Home Assistant instance.
db_url: The database URL to connect to.

Returns:
A tuple containing the following items:
- (scoped_session | None): The SQLAlchemy session maker for executing
queries. This is `None` if a connection to the database could not
be established.
- (bool): A flag indicating if the query is against the recorder
database.
- (bool): A flag indicating if the dedicated recorder database
executor should be used.

"""
try:
instance = get_instance(hass)
except KeyError: # No recorder loaded
uses_recorder_db = False
else:
uses_recorder_db = db_url == instance.db_url
sessmaker: scoped_session | None
sql_data = _async_get_or_init_domain_data(hass)
use_database_executor = False
if uses_recorder_db and instance.dialect_name == SupportedDialect.SQLITE:
use_database_executor = True
assert instance.engine is not None
sessmaker = scoped_session(sessionmaker(bind=instance.engine, future=True))
# For other databases we need to create a new engine since
# we want the connection to use the default timezone and these
# database engines will use QueuePool as its only sqlite that
# needs our custom pool. If there is already a session maker
# for this db_url we can use that so we do not create a new engine
# for every sensor.
elif db_url in sql_data.session_makers_by_db_url:
sessmaker = sql_data.session_makers_by_db_url[db_url]
elif sessmaker := await hass.async_add_executor_job(
_validate_and_get_session_maker_for_db_url, db_url
):
sql_data.session_makers_by_db_url[db_url] = sessmaker
else:
return (None, uses_recorder_db, use_database_executor)

return (sessmaker, uses_recorder_db, use_database_executor)


def validate_query(
hass: HomeAssistant,
query_str: str,
uses_recorder_db: bool,
unique_id: str | None = None,
) -> None:
"""Validate the query against common performance issues.

Args:
hass: The Home Assistant instance.
query_str: The SQL query string to be validated.
uses_recorder_db: A boolean indicating if the query is against the recorder database.
unique_id: The unique ID of the entity, used for creating issue registry keys.

Raises:
ValueError: If the query uses `entity_id` without referencing `states_meta`.

"""
if not uses_recorder_db:
return
redacted_query = redact_credentials(query_str)

issue_key = unique_id if unique_id else redacted_query
# If the query has a unique id and they fix it we can dismiss the issue
# but if it doesn't have a unique id they have to ignore it instead

upper_query = query_str.upper()
if (
"ENTITY_ID," in upper_query or "ENTITY_ID " in upper_query
) and "STATES_META" not in upper_query:
_LOGGER.error(
"The query `%s` contains the keyword `entity_id` but does not "
"reference the `states_meta` table. This will cause a full table "
"scan and database instability. Please check the documentation and use "
"`states_meta.entity_id` instead",
redacted_query,
)

ir.async_create_issue(
hass,
DOMAIN,
f"entity_id_query_does_full_table_scan_{issue_key}",
translation_key="entity_id_query_does_full_table_scan",
translation_placeholders={"query": redacted_query},
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
)
raise ValueError("Query contains entity_id but does not reference states_meta")

ir.async_delete_issue(
hass, DOMAIN, f"entity_id_query_does_full_table_scan_{issue_key}"
)
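A hedged illustration of the check above, with hypothetical queries against the recorder database:

# Flagged: selects entity_id straight from the states table, so validate_query
# logs an error, opens a repair issue, and raises ValueError.
bad_query = "SELECT entity_id, state FROM states ORDER BY state_id DESC LIMIT 1"

# Accepted: references states_meta, so the entity_id filter can use the proper index.
good_query = (
    "SELECT states.state FROM states "
    "JOIN states_meta ON states.metadata_id = states_meta.metadata_id "
    "WHERE states_meta.entity_id = 'sensor.example' "
    "ORDER BY state_id DESC LIMIT 1"
)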
@callback
def _async_get_or_init_domain_data(hass: HomeAssistant) -> SQLData:
"""Get or initialize domain data."""
if DOMAIN in hass.data:
sql_data: SQLData = hass.data[DOMAIN]
return sql_data

session_makers_by_db_url: dict[str, scoped_session] = {}

#
# Ensure we dispose of all engines at shutdown
# to avoid unclean disconnects
#
# Shutdown all sessions in the executor since they will
# do blocking I/O
#
def _shutdown_db_engines(event: Event) -> None:
"""Shutdown all database engines."""
for sessmaker in session_makers_by_db_url.values():
sessmaker.connection().engine.dispose()

cancel_shutdown = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, _shutdown_db_engines
)

sql_data = SQLData(cancel_shutdown, session_makers_by_db_url)
hass.data[DOMAIN] = sql_data
return sql_data


def _validate_and_get_session_maker_for_db_url(db_url: str) -> scoped_session | None:
"""Validate the db_url and return a session maker.

This does I/O and should be run in the executor.
"""
sess: Session | None = None
try:
engine = sqlalchemy.create_engine(db_url, future=True)
sessmaker = scoped_session(sessionmaker(bind=engine, future=True))
# Run a dummy query just to test the db_url
sess = sessmaker()
sess.execute(sqlalchemy.text("SELECT 1;"))

except SQLAlchemyError as err:
_LOGGER.error(
"Couldn't connect using %s DB_URL: %s",
redact_credentials(db_url),
redact_credentials(str(err)),
)
return None
else:
return sessmaker
finally:
if sess:
sess.close()


def generate_lambda_stmt(query: str) -> StatementLambdaElement:
"""Generate the lambda statement."""
text = sqlalchemy.text(query)
return lambda_stmt(lambda: text, lambda_cache=_SQL_LAMBDA_CACHE)
Some files were not shown because too many files have changed in this diff.