Mirror of https://github.com/home-assistant/core.git (synced 2025-09-20 02:19:36 +00:00)
Compare commits
173 Commits
2025.3.0b8
SHA1:

51162320cb, b88eab8ba3, 6c080ee650, 8056b0df2b, 3f94b7a61c, 1484e46317, 2812c8a993, 5043e2ad10,
2c2fd76270, 7001f8daaf, b41fc932c5, 0872243297, bba889975a, 01e8ca6495, 7d82375f81, 47033e587b,
e73b08b269, a195a9107b, 185949cc18, c129f27c95, 6a5a66e2f9, db63d9fcbf, 5b3d798eca, a0dde2a7d6,
1bdc33d52d, f1d332da5a, 304c13261a, c58cbfd6f4, b890d3e15a, 2c9b8b6835, 73cc1f51ca, dca77e8232,
03cb177e7c, ad04b53615, 46bcb307f6, b816625028, 0940fc7806, 50aefc3653, c0dc83cbc0, 8382663be4,
7e1309d874, 1d0cba1a43, 7d9a6ceb6b, 6abdb28a03, 3690e03951, 4fe4d14f16, 74e8ffa555, c257b228f1,
6ff0f67d03, 8fdff9ca37, 9055dff9bd, e766d681b5, 511e57d0b3, 74be49d00d, 684c3aac6b, a718b6ebff,
f17274d417, 1530139a61, f56d65b2ec, 21277a81d3, e1ce5b8c69, 0323a9c4e6, c7d89398a0, 8cc587d3a7,
5ad156767a, f54b3f4de2, 6f0c62dc9d, dce8bca103, 22af8af132, 8a62b882bf, 708f22fe6f, a4e71e2055,
61a3cc37e0, a0668e5a5b, b4b7142b55, 108b71d33c, 2636a47333, 17116fcd6c, 17c16144d1, 178d509d56,
09c129de40, 07128ba063, a786ff53ff, d2e19c829d, 94b342f26a, 9e3e6b3f43, 4300900322, 342e04974d,
fdb4c0a81f, 6de878ffe4, c63aaec09e, d8bf47c101, 736ff8828d, b501999a4c, 3985f1c6c8, 46ec3987a8,
df4e5a54e3, d8a259044f, 0891669aee, 83c0351338, c5e5fe555d, 345ba73777, e4200a79a2, 381fa65ba0,
16314711b8, 553abe4a4a, 6a1bbdb3a7, 59d92c75bd, 7732e6878e, 2cde317d59, 0c08430507, fa6d7d5e3c,
585b950a46, 3effc2e182, 0e1602ff71, 693584ce29, 2e972422c2, 3a21c36173, 25ee2e58a5, 561b3ae21b,
5be7f49146, 2694828451, 3eea932b24, 468208502f, 92268f894a, 5e5fd6a2f2, cadee73da8, 51099ae7d6,
b777c29bab, fc1190dafd, 775a81829b, 998757f09e, b964bc58be, bd80a78848, 37c8764426, 9262dec444,
3c3c4d2641, c1898ece80, fdf69fcd7d, e403bee95b, 9be8fd4eac, e09b40c2bd, 2826198d5d, 5324f3e542,
7e97ef588b, bb120020a8, bb9aba2a7d, b676c2f61b, 0c092f80c7, 2bf592d8aa, e591157e37, ee01aa73b8,
0f827fbf22, 4dca4a64b5, b82886a3e1, fe396cdf4b, 5895245a31, 861ba0ee5e, d15f9edc57, cab6ec0363,
eb26a2124b, 4530fe4bf7, b1865de58f, 3ff04d6d04, bd306abace, 412ceca6f7, 8644fb1887, 622be70fee,
7bc0c1b912, 3230e741e9, 81db3dea41, fe348e17a3, 03f6508bd8
8  .github/workflows/builder.yml (vendored)

@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations

@@ -197,7 +197,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \

@@ -263,7 +263,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \

@@ -462,7 +462,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations
6  .github/workflows/ci.yaml (vendored)

@@ -942,7 +942,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: pytest_buckets
       - name: Compile English translations

@@ -1271,7 +1271,7 @@ jobs:
       - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov

@@ -1410,7 +1410,7 @@ jobs:
       - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
54  .github/workflows/wheels.yml (vendored)

@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_all_wheels

@@ -218,15 +218,7 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Build wheels (part 1)
+      - name: Build wheels
         uses: home-assistant/wheels@2024.11.0
         with:
           abi: ${{ matrix.abi }}

@@ -238,32 +230,4 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
+          requirements: "requirements_all.txt"
1  .vscode/launch.json (vendored)

@@ -38,7 +38,6 @@
       "module": "pytest",
       "justMyCode": false,
       "args": [
         "--timeout=10",
-        "--picked"
       ],
     },
2  CODEOWNERS (generated)

@@ -1401,6 +1401,8 @@ build.json @home-assistant/supervisor
 /tests/components/smappee/ @bsmappee
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
 /tests/components/smart_meter_texas/ @grahamwetzler
+/homeassistant/components/smartthings/ @joostlek
+/tests/components/smartthings/ @joostlek
 /homeassistant/components/smarttub/ @mdz
 /tests/components/smarttub/ @mdz
 /homeassistant/components/smarty/ @z0mbieprocess
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.0.0"],
+  "requirements": ["accuweather==4.1.0"],
   "single_config_entry": true
 }
@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
 from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
-from homeassistant.exceptions import ServiceNotFound
+from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import (
     async_track_point_in_time,

@@ -195,7 +195,8 @@ class AlertEntity(Entity):

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Async Acknowledge alert."""
         LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
+        if not self._can_ack:
+            raise ServiceValidationError("This alert cannot be acknowledged")
         self._ack = True
         self.async_write_ha_state()
@@ -2,6 +2,8 @@

 from __future__ import annotations

+from functools import partial
+
 import anthropic

 from homeassistant.config_entries import ConfigEntry

@@ -20,7 +22,9 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

 async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
     """Set up Anthropic from a config entry."""
-    client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
+    )
     try:
         await client.messages.create(
             model="claude-3-haiku-20240307",
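Both Anthropic hunks move client construction off the event loop. A minimal sketch of the pattern, with `SomeBlockingClient` as a hypothetical stand-in for `anthropic.AsyncAnthropic`: constructors that do blocking work (for example loading TLS certificate stores) are handed to an executor thread, and `functools.partial` is used because `run_in_executor` only forwards positional arguments.

```python
import asyncio
from functools import partial


class SomeBlockingClient:
    """Stand-in for a client whose __init__ performs blocking I/O."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key  # real clients may read cert stores here


async def make_client(api_key: str) -> SomeBlockingClient:
    loop = asyncio.get_running_loop()
    # partial() binds the keyword argument, since run_in_executor only
    # accepts positional arguments for the callable.
    return await loop.run_in_executor(
        None, partial(SomeBlockingClient, api_key=api_key)
    )


print(asyncio.run(make_client("example-key")).api_key)
```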
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from functools import partial
 import logging
 from types import MappingProxyType
 from typing import Any

@@ -59,7 +60,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:

     Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
     """
-    client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
+    )
     await client.messages.create(
         model="claude-3-haiku-20240307",
         max_tokens=1,
1  homeassistant/components/apollo_automation/__init__.py (new file)

@@ -0,0 +1 @@
+"""Virtual integration: Apollo Automation."""

6  homeassistant/components/apollo_automation/manifest.json (new file)

@@ -0,0 +1,6 @@
+{
+  "domain": "apollo_automation",
+  "name": "Apollo Automation",
+  "integration_type": "virtual",
+  "supported_by": "esphome"
+}
@@ -233,7 +233,6 @@ class AppleTVManager(DeviceListener):
             pass
         except Exception:
             _LOGGER.exception("Failed to connect")
-            await self.disconnect()

     async def _connect_loop(self) -> None:
         """Connect loop background task function."""
@@ -1103,12 +1103,16 @@ class PipelineRun:
         ) & conversation.ConversationEntityFeature.CONTROL:
             intent_filter = _async_local_fallback_intent_filter

-        # Try local intents first, if preferred.
-        elif self.pipeline.prefer_local_intents and (
-            intent_response := await conversation.async_handle_intents(
-                self.hass,
-                user_input,
-                intent_filter=intent_filter,
+        # Try local intents
+        if (
+            intent_response is None
+            and self.pipeline.prefer_local_intents
+            and (
+                intent_response := await conversation.async_handle_intents(
+                    self.hass,
+                    user_input,
+                    intent_filter=intent_filter,
+                )
             )
         ):
             # Local intent matched
@@ -14,6 +14,7 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
+import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast

@@ -308,6 +309,12 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."


+class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
+    """Raised when multiple exceptions occur."""
+
+    error_code = "multiple_errors"
+
+
 class BackupManager:
     """Define the format that backup managers can have."""

@@ -1605,10 +1612,24 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
         finally:
             # Inform integrations the backup is done
-            try:
-                await manager.async_post_backup_actions()
-            except BackupManagerError as err:
-                raise BackupReaderWriterError(str(err)) from err
+            # If there's an unhandled exception, we keep it so we can rethrow it in case
+            # the post backup actions also fail.
+            unhandled_exc = sys.exception()
+            try:
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err
+            except Exception as err:
+                if not unhandled_exc:
+                    raise
+                # If there's an unhandled exception, we wrap both that and the exception
+                # from the post backup actions in an ExceptionGroup so the caller is
+                # aware of both exceptions.
+                raise BackupManagerExceptionGroup(
+                    f"Multiple errors when creating backup: {unhandled_exc}, {err}",
+                    [unhandled_exc, err],
+                ) from None

     def _mkdir_and_generate_backup_contents(
         self,

@@ -1620,7 +1641,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """Generate backup contents and return the size."""
         if not tar_file_path:
             tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
-        make_backup_dir(tar_file_path.parent)
+        try:
+            make_backup_dir(tar_file_path.parent)
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Failed to create dir {tar_file_path.parent}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err

         excludes = EXCLUDE_FROM_BACKUP
         if not database_included:

@@ -1658,7 +1685,14 @@ class CoreBackupReaderWriter(BackupReaderWriter):
                 file_filter=is_excluded_by_filter,
                 arcname="data",
             )
-        return (tar_file_path, tar_file_path.stat().st_size)
+        try:
+            stat_result = tar_file_path.stat()
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Error getting size of {tar_file_path}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err
+        return (tar_file_path, stat_result.st_size)

     async def async_receive_backup(
         self,
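The backup hunk above relies on a Python 3.11+ idiom worth isolating. A minimal sketch (not the Home Assistant code itself): `sys.exception()` captures the in-flight exception inside a `finally` block, and if the cleanup step also fails, both errors are surfaced together in an `ExceptionGroup` instead of the cleanup error silently replacing the original one.

```python
import sys


def risky_work() -> None:
    raise ValueError("work failed")


def cleanup() -> None:
    raise OSError("cleanup failed")


def run() -> None:
    try:
        risky_work()
    finally:
        # None if no exception is currently propagating (Python 3.11+).
        unhandled_exc = sys.exception()
        try:
            cleanup()
        except Exception as err:
            if not unhandled_exc:
                raise
            # Surface both the original error and the cleanup error.
            raise ExceptionGroup("multiple errors", [unhandled_exc, err]) from None
```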
@@ -21,6 +21,6 @@
     "bluetooth-auto-recovery==1.4.4",
     "bluetooth-data-tools==1.23.4",
     "dbus-fast==2.33.0",
-    "habluetooth==3.24.0"
+    "habluetooth==3.24.1"
   ]
 }
@@ -153,6 +153,27 @@ def _has_min_duration(
     return validate


+def _has_positive_interval(
+    start_key: str, end_key: str, duration_key: str
+) -> Callable[[dict[str, Any]], dict[str, Any]]:
+    """Verify that the time span between start and end is greater than zero."""
+
+    def validate(obj: dict[str, Any]) -> dict[str, Any]:
+        if (duration := obj.get(duration_key)) is not None:
+            if duration <= datetime.timedelta(seconds=0):
+                raise vol.Invalid(f"Expected positive duration ({duration})")
+            return obj
+
+        if (start := obj.get(start_key)) and (end := obj.get(end_key)):
+            if start >= end:
+                raise vol.Invalid(
+                    f"Expected end time to be after start time ({start}, {end})"
+                )
+        return obj
+
+    return validate
+
+
 def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
     """Verify that all values are of the same type."""

@@ -281,6 +302,7 @@ SERVICE_GET_EVENTS_SCHEMA: Final = vol.All(
         ),
         }
     ),
+    _has_positive_interval(EVENT_START_DATETIME, EVENT_END_DATETIME, EVENT_DURATION),
 )

@@ -870,6 +892,7 @@ async def async_get_events_service(
         end = start + service_call.data[EVENT_DURATION]
     else:
         end = service_call.data[EVENT_END_DATETIME]
+
     calendar_event_list = await calendar.async_get_events(
         calendar.hass, dt_util.as_local(start), dt_util.as_local(end)
     )
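The calendar change uses the validator-factory pattern: a closure is composed with a voluptuous schema via `vol.All`, so cross-field checks run after per-field validation. A small illustration with a hypothetical schema (not the calendar one):

```python
import datetime

import voluptuous as vol


def _positive(duration_key: str):
    """Return a validator that rejects non-positive durations."""

    def validate(obj: dict) -> dict:
        if (d := obj.get(duration_key)) is not None and d <= datetime.timedelta():
            raise vol.Invalid(f"Expected positive duration ({d})")
        return obj

    return validate


SCHEMA = vol.All(
    vol.Schema({vol.Optional("duration"): datetime.timedelta}),
    _positive("duration"),
)

SCHEMA({"duration": datetime.timedelta(minutes=5)})   # passes
# SCHEMA({"duration": datetime.timedelta(seconds=-1)})  # raises vol.Invalid
```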
@@ -68,7 +68,6 @@ from .const import (  # noqa: F401
     FAN_ON,
     FAN_TOP,
     HVAC_MODES,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     PRESET_ACTIVITY,
     PRESET_AWAY,

@@ -126,7 +126,6 @@ DEFAULT_MAX_HUMIDITY = 99

 DOMAIN = "climate"

-INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"

 SERVICE_SET_AUX_HEAT = "set_aux_heat"
@@ -1,4 +1,4 @@
-"""Intents for the client integration."""
+"""Intents for the climate integration."""

 from __future__ import annotations

@@ -11,7 +11,6 @@ from homeassistant.helpers import config_validation as cv, intent
 from . import (
     ATTR_TEMPERATURE,
     DOMAIN,
-    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     SERVICE_SET_TEMPERATURE,
     ClimateEntityFeature,

@@ -20,49 +19,9 @@ from . import (

 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the climate intents."""
-    intent.async_register(hass, GetTemperatureIntent())
     intent.async_register(hass, SetTemperatureIntent())


-class GetTemperatureIntent(intent.IntentHandler):
-    """Handle GetTemperature intents."""
-
-    intent_type = INTENT_GET_TEMPERATURE
-    description = "Gets the current temperature of a climate device or entity"
-    slot_schema = {
-        vol.Optional("area"): intent.non_empty_string,
-        vol.Optional("name"): intent.non_empty_string,
-    }
-    platforms = {DOMAIN}
-
-    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
-        """Handle the intent."""
-        hass = intent_obj.hass
-        slots = self.async_validate_slots(intent_obj.slots)
-
-        name: str | None = None
-        if "name" in slots:
-            name = slots["name"]["value"]
-
-        area: str | None = None
-        if "area" in slots:
-            area = slots["area"]["value"]
-
-        match_constraints = intent.MatchTargetsConstraints(
-            name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
-        )
-        match_result = intent.async_match_targets(hass, match_constraints)
-        if not match_result.is_match:
-            raise intent.MatchFailedError(
-                result=match_result, constraints=match_constraints
-            )
-
-        response = intent_obj.create_response()
-        response.response_type = intent.IntentResponseType.QUERY_ANSWER
-        response.async_set_states(matched_states=match_result.states)
-        return response
-
-
 class SetTemperatureIntent(intent.IntentHandler):
     """Handle SetTemperature intents."""
@@ -49,7 +49,11 @@ def async_get_chat_log(
         raise RuntimeError(
             "Cannot attach chat log delta listener unless initial caller"
         )
-    if user_input is not None:
+    if user_input is not None and (
+        (content := chat_log.content[-1]).role != "user"
+        # MyPy doesn't understand that content is a UserContent here
+        or content.content != user_input.text  # type: ignore[union-attr]
+    ):
         chat_log.async_add_user_content(UserContent(content=user_input.text))

     yield chat_log
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
+  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"]
 }
@@ -24,7 +24,14 @@ from homeassistant.const import (
     STATE_UNKNOWN,
     UnitOfTime,
 )
-from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
+from homeassistant.core import (
+    Event,
+    EventStateChangedData,
+    EventStateReportedData,
+    HomeAssistant,
+    State,
+    callback,
+)
 from homeassistant.helpers import config_validation as cv, entity_registry as er
 from homeassistant.helpers.device import async_device_info_to_link_from_entity
 from homeassistant.helpers.device_registry import DeviceInfo

@@ -32,7 +39,10 @@ from homeassistant.helpers.entity_platform import (
     AddConfigEntryEntitiesCallback,
     AddEntitiesCallback,
 )
-from homeassistant.helpers.event import async_track_state_change_event
+from homeassistant.helpers.event import (
+    async_track_state_change_event,
+    async_track_state_report_event,
+)
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

 from .const import (

@@ -200,13 +210,33 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             _LOGGER.warning("Could not restore last state: %s", err)

         @callback
-        def calc_derivative(event: Event[EventStateChangedData]) -> None:
+        def on_state_reported(event: Event[EventStateReportedData]) -> None:
+            """Handle constant sensor state."""
+            if self._attr_native_value == Decimal(0):
+                # If the derivative is zero, and the source sensor hasn't
+                # changed state, then we know it will still be zero.
+                return
+            new_state = event.data["new_state"]
+            if new_state is not None:
+                calc_derivative(
+                    new_state, new_state.state, event.data["old_last_reported"]
+                )
+
+        @callback
+        def on_state_changed(event: Event[EventStateChangedData]) -> None:
+            """Handle changed sensor state."""
+            new_state = event.data["new_state"]
+            old_state = event.data["old_state"]
+            if new_state is not None and old_state is not None:
+                calc_derivative(new_state, old_state.state, old_state.last_reported)
+
+        def calc_derivative(
+            new_state: State, old_value: str, old_last_reported: datetime
+        ) -> None:
             """Handle the sensor state changes."""
-            if (
-                (old_state := event.data["old_state"]) is None
-                or old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
-                or (new_state := event.data["new_state"]) is None
-                or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
+            if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
+                STATE_UNKNOWN,
+                STATE_UNAVAILABLE,
             ):
                 return

@@ -220,15 +250,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             self._state_list = [
                 (time_start, time_end, state)
                 for time_start, time_end, state in self._state_list
-                if (new_state.last_updated - time_end).total_seconds()
+                if (new_state.last_reported - time_end).total_seconds()
                 < self._time_window
             ]

             try:
                 elapsed_time = (
-                    new_state.last_updated - old_state.last_updated
+                    new_state.last_reported - old_last_reported
                 ).total_seconds()
-                delta_value = Decimal(new_state.state) - Decimal(old_state.state)
+                delta_value = Decimal(new_state.state) - Decimal(old_value)
                 new_derivative = (
                     delta_value
                     / Decimal(elapsed_time)

@@ -240,7 +270,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                 _LOGGER.warning("While calculating derivative: %s", err)
             except DecimalException as err:
                 _LOGGER.warning(
-                    "Invalid state (%s > %s): %s", old_state.state, new_state.state, err
+                    "Invalid state (%s > %s): %s", old_value, new_state.state, err
                 )
             except AssertionError as err:
                 _LOGGER.error("Could not calculate derivative: %s", err)

@@ -257,7 +287,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

             # add latest derivative to the window list
             self._state_list.append(
-                (old_state.last_updated, new_state.last_updated, new_derivative)
+                (old_last_reported, new_state.last_reported, new_derivative)
             )

         def calculate_weight(

@@ -277,13 +307,19 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
             else:
                 derivative = Decimal("0.00")
                 for start, end, value in self._state_list:
-                    weight = calculate_weight(start, end, new_state.last_updated)
+                    weight = calculate_weight(start, end, new_state.last_reported)
                     derivative = derivative + (value * Decimal(weight))
             self._attr_native_value = round(derivative, self._round_digits)
             self.async_write_ha_state()

         self.async_on_remove(
             async_track_state_change_event(
-                self.hass, self._sensor_source_id, calc_derivative
+                self.hass, self._sensor_source_id, on_state_changed
             )
         )

+        self.async_on_remove(
+            async_track_state_report_event(
+                self.hass, self._sensor_source_id, on_state_reported
+            )
+        )
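The derivative change hinges on subscribing to two event streams. A stripped-down sketch of that dual subscription, with placeholder callback bodies: state-changed events fire when the source value changes, while state-reported events also fire when the source re-reports the same value, which lets the derivative decay toward zero between changes.

```python
from homeassistant.core import (
    Event,
    EventStateChangedData,
    EventStateReportedData,
    HomeAssistant,
    callback,
)
from homeassistant.helpers.event import (
    async_track_state_change_event,
    async_track_state_report_event,
)


def subscribe(hass: HomeAssistant, source_entity_id: str) -> None:
    @callback
    def on_changed(event: Event[EventStateChangedData]) -> None:
        # Value changed: recompute using old_state.last_reported
        # as the start of the interval.
        ...

    @callback
    def on_reported(event: Event[EventStateReportedData]) -> None:
        # Same value re-reported: recompute using
        # event.data["old_last_reported"] as the interval start.
        ...

    async_track_state_change_event(hass, source_entity_id, on_changed)
    async_track_state_report_event(hass, source_entity_id, on_reported)
```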
@@ -8,6 +8,7 @@ from devolo_plc_api.device_api import (
     WifiGuestAccessGet,
 )
 from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork
+from yarl import URL

 from homeassistant.const import ATTR_CONNECTIONS
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo

@@ -43,7 +44,7 @@ class DevoloEntity(Entity):
         self.entry = entry

         self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{self.device.ip}",
+            configuration_url=URL.build(scheme="http", host=self.device.ip),
             identifiers={(DOMAIN, str(self.device.serial_number))},
             manufacturer="devolo",
             model=self.device.product,
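A short illustration of why `URL.build` is a safer choice than the f-string it replaces, assuming yarl's documented host handling: it brackets IPv6 hosts, which naive string interpolation does not.

```python
from yarl import URL

print(URL.build(scheme="http", host="192.0.2.1"))    # http://192.0.2.1
print(URL.build(scheme="http", host="2001:db8::1"))  # http://[2001:db8::1]
print(f"http://{'2001:db8::1'}")                     # http://2001:db8::1 (not a valid URL)
```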
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"]
 }
@@ -6,5 +6,5 @@
   "dependencies": ["webhook"],
   "documentation": "https://www.home-assistant.io/integrations/ecowitt",
   "iot_class": "local_push",
-  "requirements": ["aioecowitt==2024.2.1"]
+  "requirements": ["aioecowitt==2025.3.1"]
 }
@@ -105,6 +105,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
         AlarmControlPanelEntityFeature.ARM_HOME
         | AlarmControlPanelEntityFeature.ARM_AWAY
         | AlarmControlPanelEntityFeature.ARM_NIGHT
+        | AlarmControlPanelEntityFeature.ARM_VACATION
     )
     _element: Area

@@ -204,7 +205,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
         ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME,
         ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT,
         ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT,
-        ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY,
+        ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_VACATION,
     }

     if self._element.alarm_state is None:
@@ -6,5 +6,5 @@
   "iot_class": "local_push",
   "loggers": ["sense_energy"],
   "quality_scale": "internal",
-  "requirements": ["sense-energy==0.13.5"]
+  "requirements": ["sense-energy==0.13.6"]
 }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.7.2"]
+  "requirements": ["env-canada==0.8.0"]
 }
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.8.0"]
 }
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from functools import partial
+from math import isfinite
 from typing import Any, cast

 from aioesphomeapi import (

@@ -238,9 +239,13 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
     @esphome_state_property
     def current_humidity(self) -> int | None:
         """Return the current humidity."""
-        if not self._static_info.supports_current_humidity:
+        if (
+            not self._static_info.supports_current_humidity
+            or (val := self._state.current_humidity) is None
+            or not isfinite(val)
+        ):
             return None
-        return round(self._state.current_humidity)
+        return round(val)

     @property
     @esphome_float_state_property
@@ -41,6 +41,7 @@ from .const import (
     CONF_ALLOW_SERVICE_CALLS,
     CONF_DEVICE_NAME,
     CONF_NOISE_PSK,
+    CONF_SUBSCRIBE_LOGS,
     DEFAULT_ALLOW_SERVICE_CALLS,
     DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
     DOMAIN,

@@ -508,6 +509,10 @@ class OptionsFlowHandler(OptionsFlow):
                     CONF_ALLOW_SERVICE_CALLS, DEFAULT_ALLOW_SERVICE_CALLS
                 ),
             ): bool,
+            vol.Required(
+                CONF_SUBSCRIBE_LOGS,
+                default=self.config_entry.options.get(CONF_SUBSCRIBE_LOGS, False),
+            ): bool,
         }
     )
     return self.async_show_form(step_id="init", data_schema=data_schema)
@@ -5,6 +5,7 @@ from awesomeversion import AwesomeVersion
 DOMAIN = "esphome"

 CONF_ALLOW_SERVICE_CALLS = "allow_service_calls"
+CONF_SUBSCRIBE_LOGS = "subscribe_logs"
 CONF_DEVICE_NAME = "device_name"
 CONF_NOISE_PSK = "noise_psk"

@@ -12,11 +13,13 @@ DEFAULT_ALLOW_SERVICE_CALLS = True
 DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False


-STABLE_BLE_VERSION_STR = "2023.8.0"
+STABLE_BLE_VERSION_STR = "2025.2.2"
 STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
 PROJECT_URLS = {
     "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
 }
-DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html"
+# ESPHome always uses .0 for the changelog URL
+STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
+DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"

 DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy"
@@ -5,6 +5,7 @@ from __future__ import annotations
 import asyncio
 from functools import partial
 import logging
+import re
 from typing import TYPE_CHECKING, Any, NamedTuple

 from aioesphomeapi import (

@@ -16,6 +17,7 @@ from aioesphomeapi import (
     HomeassistantServiceCall,
     InvalidAuthAPIError,
     InvalidEncryptionKeyAPIError,
+    LogLevel,
     ReconnectLogic,
     RequiresEncryptionAPIError,
     UserService,

@@ -33,6 +35,7 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import (
+    CALLBACK_TYPE,
     Event,
     EventStateChangedData,
     HomeAssistant,

@@ -61,6 +64,7 @@ from .bluetooth import async_connect_scanner
 from .const import (
     CONF_ALLOW_SERVICE_CALLS,
     CONF_DEVICE_NAME,
+    CONF_SUBSCRIBE_LOGS,
     DEFAULT_ALLOW_SERVICE_CALLS,
     DEFAULT_URL,
     DOMAIN,

@@ -74,8 +78,38 @@ from .domain_data import DomainData
 # Import config flow so that it's added to the registry
 from .entry_data import ESPHomeConfigEntry, RuntimeEntryData

+if TYPE_CHECKING:
+    from aioesphomeapi.api_pb2 import (  # type: ignore[attr-defined]
+        SubscribeLogsResponse,
+    )
+
+
 _LOGGER = logging.getLogger(__name__)

+LOG_LEVEL_TO_LOGGER = {
+    LogLevel.LOG_LEVEL_NONE: logging.DEBUG,
+    LogLevel.LOG_LEVEL_ERROR: logging.ERROR,
+    LogLevel.LOG_LEVEL_WARN: logging.WARNING,
+    LogLevel.LOG_LEVEL_INFO: logging.INFO,
+    LogLevel.LOG_LEVEL_CONFIG: logging.INFO,
+    LogLevel.LOG_LEVEL_DEBUG: logging.DEBUG,
+    LogLevel.LOG_LEVEL_VERBOSE: logging.DEBUG,
+    LogLevel.LOG_LEVEL_VERY_VERBOSE: logging.DEBUG,
+}
+LOGGER_TO_LOG_LEVEL = {
+    logging.NOTSET: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+    logging.DEBUG: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+    logging.INFO: LogLevel.LOG_LEVEL_CONFIG,
+    logging.WARNING: LogLevel.LOG_LEVEL_WARN,
+    logging.ERROR: LogLevel.LOG_LEVEL_ERROR,
+    logging.CRITICAL: LogLevel.LOG_LEVEL_ERROR,
+}
+# 7-bit and 8-bit C1 ANSI sequences
+# https://stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python
+ANSI_ESCAPE_78BIT = re.compile(
+    rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
+)
+

 @callback
 def _async_check_firmware_version(

@@ -136,6 +170,8 @@ class ESPHomeManager:
     """Class to manage an ESPHome connection."""

     __slots__ = (
+        "_cancel_subscribe_logs",
+        "_log_level",
         "cli",
         "device_id",
         "domain_data",

@@ -169,6 +205,8 @@ class ESPHomeManager:
         self.reconnect_logic: ReconnectLogic | None = None
         self.zeroconf_instance = zeroconf_instance
         self.entry_data = entry.runtime_data
+        self._cancel_subscribe_logs: CALLBACK_TYPE | None = None
+        self._log_level = LogLevel.LOG_LEVEL_NONE

     async def on_stop(self, event: Event) -> None:
         """Cleanup the socket client on HA close."""

@@ -341,6 +379,34 @@ class ESPHomeManager:
         # Re-connection logic will trigger after this
         await self.cli.disconnect()

+    def _async_on_log(self, msg: SubscribeLogsResponse) -> None:
+        """Handle a log message from the API."""
+        log: bytes = msg.message
+        _LOGGER.log(
+            LOG_LEVEL_TO_LOGGER.get(msg.level, logging.DEBUG),
+            "%s: %s",
+            self.entry.title,
+            ANSI_ESCAPE_78BIT.sub(b"", log).decode("utf-8", "backslashreplace"),
+        )
+
+    @callback
+    def _async_get_equivalent_log_level(self) -> LogLevel:
+        """Get the equivalent ESPHome log level for the current logger."""
+        return LOGGER_TO_LOG_LEVEL.get(
+            _LOGGER.getEffectiveLevel(), LogLevel.LOG_LEVEL_VERY_VERBOSE
+        )
+
+    @callback
+    def _async_subscribe_logs(self, log_level: LogLevel) -> None:
+        """Subscribe to logs."""
+        if self._cancel_subscribe_logs is not None:
+            self._cancel_subscribe_logs()
+            self._cancel_subscribe_logs = None
+        self._log_level = log_level
+        self._cancel_subscribe_logs = self.cli.subscribe_logs(
+            self._async_on_log, self._log_level
+        )
+
     async def _on_connnect(self) -> None:
         """Subscribe to states and list entities on successful API login."""
         entry = self.entry

@@ -352,6 +418,8 @@ class ESPHomeManager:
         cli = self.cli
         stored_device_name = entry.data.get(CONF_DEVICE_NAME)
         unique_id_is_mac_address = unique_id and ":" in unique_id
+        if entry.options.get(CONF_SUBSCRIBE_LOGS):
+            self._async_subscribe_logs(self._async_get_equivalent_log_level())
         results = await asyncio.gather(
             create_eager_task(cli.device_info()),
             create_eager_task(cli.list_entities_services()),

@@ -503,6 +571,10 @@ class ESPHomeManager:
     def _async_handle_logging_changed(self, _event: Event) -> None:
         """Handle when the logging level changes."""
         self.cli.set_debug(_LOGGER.isEnabledFor(logging.DEBUG))
+        if self.entry.options.get(CONF_SUBSCRIBE_LOGS) and self._log_level != (
+            new_log_level := self._async_get_equivalent_log_level()
+        ):
+            self._async_subscribe_logs(new_log_level)

     async def async_start(self) -> None:
         """Start the esphome connection manager."""
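The new `_async_on_log` handler strips terminal color codes before forwarding device logs to Python's logging. A self-contained sketch of that step, reusing the exact regex from the diff:

```python
import re

# 7-bit and 8-bit C1 ANSI escape sequences (same pattern as in the diff).
ANSI_ESCAPE_78BIT = re.compile(
    rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
)

# A device log line arrives as bytes, possibly with color escape codes.
raw = b"\x1b[0;32m[I][wifi:501]: Connected\x1b[0m"
clean = ANSI_ESCAPE_78BIT.sub(b"", raw).decode("utf-8", "backslashreplace")
print(clean)  # [I][wifi:501]: Connected
```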
@@ -16,9 +16,9 @@
   "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
   "mqtt": ["esphome/discover/#"],
   "requirements": [
-    "aioesphomeapi==29.1.1",
+    "aioesphomeapi==29.2.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==2.7.1"
+    "bleak-esphome==2.8.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -54,7 +54,8 @@
     "step": {
       "init": {
         "data": {
-          "allow_service_calls": "Allow the device to perform Home Assistant actions."
+          "allow_service_calls": "Allow the device to perform Home Assistant actions.",
+          "subscribe_logs": "Subscribe to logs from the device. When enabled, the device will send logs to Home Assistant and you can view them in the logs panel."
         }
       }
     }
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250221.0"]
+  "requirements": ["home-assistant-frontend==20250305.0"]
 }
@@ -111,9 +111,20 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
             continue
         if key == "any_of":
             val = [_format_schema(subschema) for subschema in val]
-        if key == "type":
+        elif key == "type":
             val = val.upper()
-        if key == "items":
+        elif key == "format":
+            # Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
+            # formats that are not supported are ignored
+            if schema.get("type") == "string" and val not in ("enum", "date-time"):
+                continue
+            if schema.get("type") == "number" and val not in ("float", "double"):
+                continue
+            if schema.get("type") == "integer" and val not in ("int32", "int64"):
+                continue
+            if schema.get("type") not in ("string", "number", "integer"):
+                continue
+        elif key == "items":
             val = _format_schema(val)
         elif key == "properties":
             val = {k: _format_schema(v) for k, v in val.items()}
@@ -20,3 +20,4 @@ MAX_ERRORS = 2
 TARGET_TEMPERATURE_STEP = 1

 UPDATE_INTERVAL = 60
+MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import copy
 from datetime import datetime, timedelta
 import logging
 from typing import Any

@@ -24,6 +25,7 @@ from .const import (
     DISPATCH_DEVICE_DISCOVERED,
     DOMAIN,
     MAX_ERRORS,
+    MAX_EXPECTED_RESPONSE_TIME_INTERVAL,
     UPDATE_INTERVAL,
 )

@@ -48,7 +50,6 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             always_update=False,
         )
         self.device = device
         self.device.add_handler(Response.DATA, self.device_state_updated)
-        self.device.add_handler(Response.RESULT, self.device_state_updated)

         self._error_count: int = 0

@@ -88,7 +89,9 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         # raise update failed if time for more than MAX_ERRORS has passed since last update
         now = utcnow()
         elapsed_success = now - self._last_response_time
-        if self.update_interval and elapsed_success >= self.update_interval:
+        if self.update_interval and elapsed_success >= timedelta(
+            seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
+        ):
             if not self._last_error_time or (
                 (now - self.update_interval) >= self._last_error_time
             ):

@@ -96,16 +99,19 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                 self._error_count += 1

             _LOGGER.warning(
-                "Device %s is unresponsive for %s seconds",
+                "Device %s took an unusually long time to respond, %s seconds",
                 self.name,
                 elapsed_success,
             )
         else:
             self._error_count = 0
         if self.last_update_success and self._error_count >= MAX_ERRORS:
             raise UpdateFailed(
                 f"Device {self.name} is unresponsive for too long and now unavailable"
             )

-        return self.device.raw_properties
+        self._last_response_time = utcnow()
+        return copy.deepcopy(self.device.raw_properties)

     async def push_state_update(self):
         """Send state updates to the physical device."""
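A note on the `copy.deepcopy` in the last hunk: with `always_update=False`, Home Assistant's `DataUpdateCoordinator` compares the previous and new data objects and skips notifying listeners when they are equal. Returning the device's own mutable dict each refresh would make old and new compare equal even after in-place mutation. A minimal illustration of the failure mode and the fix:

```python
import copy

raw = {"power": "on"}

previous = raw                 # returned last refresh without a copy
raw["power"] = "off"           # device mutates its dict in place
print(previous == raw)         # True -> the change is invisible to the comparison

previous = copy.deepcopy(raw)  # returned as an independent snapshot
raw["power"] = "on"
print(previous == raw)         # False -> the change is detected
```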
@@ -26,6 +26,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="todayEnergy",
         native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
         device_class=SensorDeviceClass.ENERGY,
+        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     GrowattSensorEntityDescription(
         key="total_output_power",

@@ -33,6 +34,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="invTodayPpv",
         native_unit_of_measurement=UnitOfPower.WATT,
         device_class=SensorDeviceClass.POWER,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     GrowattSensorEntityDescription(
         key="total_energy_output",
@@ -11,7 +11,6 @@ from hko import HKO, HKOError
 from homeassistant.components.weather import (
     ATTR_CONDITION_CLOUDY,
     ATTR_CONDITION_FOG,
-    ATTR_CONDITION_HAIL,
     ATTR_CONDITION_LIGHTNING_RAINY,
     ATTR_CONDITION_PARTLYCLOUDY,
     ATTR_CONDITION_POURING,

@@ -145,7 +144,7 @@ class HKOUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     """Return the condition corresponding to the weather info."""
     info = info.lower()
     if WEATHER_INFO_RAIN in info:
-        return ATTR_CONDITION_HAIL
+        return ATTR_CONDITION_RAINY
     if WEATHER_INFO_SNOW in info and WEATHER_INFO_RAIN in info:
         return ATTR_CONDITION_SNOWY_RAINY
     if WEATHER_INFO_SNOW in info:
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.67", "babel==2.15.0"]
+  "requirements": ["holidays==0.68", "babel==2.15.0"]
 }
@@ -72,22 +72,27 @@ def _handle_paired_or_connected_appliance(
         for entity in get_option_entities_for_appliance(entry, appliance)
         if entity.unique_id not in known_entity_unique_ids
     )
-    changed_options_listener_remove_callback = (
-        entry.runtime_data.async_add_listener(
-            partial(
-                _create_option_entities,
-                entry,
-                appliance,
-                known_entity_unique_ids,
-                get_option_entities_for_appliance,
-                async_add_entities,
-            ),
-        )
-    )
-    entry.async_on_unload(changed_options_listener_remove_callback)
-    changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
-        changed_options_listener_remove_callback
-    )
+    for event_key in (
+        EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
+        EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
+    ):
+        changed_options_listener_remove_callback = (
+            entry.runtime_data.async_add_listener(
+                partial(
+                    _create_option_entities,
+                    entry,
+                    appliance,
+                    known_entity_unique_ids,
+                    get_option_entities_for_appliance,
+                    async_add_entities,
+                ),
+                (appliance.info.ha_id, event_key),
+            )
+        )
+        entry.async_on_unload(changed_options_listener_remove_callback)
+        changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
+            changed_options_listener_remove_callback
+        )
     known_entity_unique_ids.update(
         {
             cast(str, entity.unique_id): appliance.info.ha_id
@@ -4,6 +4,8 @@ from typing import cast

 from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey

+from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume
+
 from .utils import bsh_key_to_translation_key

 DOMAIN = "home_connect"

@@ -21,6 +23,13 @@ APPLIANCES_WITH_PROGRAMS = (
     "WasherDryer",
 )

+UNIT_MAP = {
+    "seconds": UnitOfTime.SECONDS,
+    "ml": UnitOfVolume.MILLILITERS,
+    "°C": UnitOfTemperature.CELSIUS,
+    "°F": UnitOfTemperature.FAHRENHEIT,
+}
+

 BSH_POWER_ON = "BSH.Common.EnumType.PowerState.On"
 BSH_POWER_OFF = "BSH.Common.EnumType.PowerState.Off"
@@ -47,8 +47,6 @@ _LOGGER = logging.getLogger(__name__)

 type HomeConnectConfigEntry = ConfigEntry[HomeConnectCoordinator]

-EVENT_STREAM_RECONNECT_DELAY = 30
-

 @dataclass(frozen=True, kw_only=True)
 class HomeConnectApplianceData:

@@ -100,6 +98,7 @@ class HomeConnectCoordinator(
             CALLBACK_TYPE, tuple[CALLBACK_TYPE, tuple[EventKey, ...]]
         ] = {}
         self.device_registry = dr.async_get(self.hass)
+        self.data = {}

     @cached_property
     def context_listeners(self) -> dict[tuple[str, EventKey], list[CALLBACK_TYPE]]:

@@ -157,10 +156,20 @@ class HomeConnectCoordinator(

     async def _event_listener(self) -> None:
         """Match event with listener for event type."""
+        retry_time = 10
         while True:
             try:
                 async for event_message in self.client.stream_all_events():
+                    retry_time = 10
                     event_message_ha_id = event_message.ha_id
+                    if (
+                        event_message_ha_id in self.data
+                        and not self.data[event_message_ha_id].info.connected
+                    ):
+                        self.data[event_message_ha_id].info.connected = True
+                        self._call_all_event_listeners_for_appliance(
+                            event_message_ha_id
+                        )
                     match event_message.type:
                         case EventType.STATUS:
                             statuses = self.data[event_message_ha_id].status

@@ -256,20 +265,18 @@ class HomeConnectCoordinator(
             except (EventStreamInterruptedError, HomeConnectRequestError) as error:
                 _LOGGER.debug(
                     "Non-breaking error (%s) while listening for events,"
-                    " continuing in 30 seconds",
+                    " continuing in %s seconds",
                     type(error).__name__,
+                    retry_time,
                 )
-                await asyncio.sleep(EVENT_STREAM_RECONNECT_DELAY)
+                await asyncio.sleep(retry_time)
+                retry_time = min(retry_time * 2, 3600)
             except HomeConnectApiError as error:
                 _LOGGER.error("Error while listening for events: %s", error)
                 self.hass.config_entries.async_schedule_reload(
                     self.config_entry.entry_id
                 )
                 break
-            # if there was a non-breaking error, we continue listening
-            # but we need to refresh the data to get the possible changes
-            # that happened while the event stream was interrupted
-            await self.async_refresh()

     @callback
     def _call_event_listener(self, event_message: EventMessage) -> None:

@@ -297,6 +304,8 @@ class HomeConnectCoordinator(
                 translation_placeholders=get_dict_from_home_connect_error(error),
             ) from error
         except HomeConnectError as error:
+            for appliance_data in self.data.values():
+                appliance_data.info.connected = False
             raise UpdateFailed(
                 translation_domain=DOMAIN,
                 translation_key="fetch_api_error",

@@ -305,7 +314,7 @@ class HomeConnectCoordinator(

         return {
             appliance.ha_id: await self._get_appliance_data(
-                appliance, self.data.get(appliance.ha_id) if self.data else None
+                appliance, self.data.get(appliance.ha_id)
             )
             for appliance in appliances.homeappliances
         }

@@ -440,13 +449,27 @@ class HomeConnectCoordinator(
         self, ha_id: str, program_key: ProgramKey
     ) -> dict[OptionKey, ProgramDefinitionOption]:
         """Get options with constraints for appliance."""
-        return {
-            option.key: option
-            for option in (
-                await self.client.get_available_program(ha_id, program_key=program_key)
-            ).options
-            or []
-        }
+        if program_key is ProgramKey.UNKNOWN:
+            return {}
+        try:
+            return {
+                option.key: option
+                for option in (
+                    await self.client.get_available_program(
+                        ha_id, program_key=program_key
+                    )
+                ).options
+                or []
+            }
+        except HomeConnectError as error:
+            _LOGGER.debug(
+                "Error fetching options for %s: %s",
+                ha_id,
+                error
+                if isinstance(error, HomeConnectApiError)
+                else type(error).__name__,
+            )
+            return {}

     async def update_options(
         self, ha_id: str, event_key: EventKey, program_key: ProgramKey

@@ -456,8 +479,7 @@ class HomeConnectCoordinator(
         events = self.data[ha_id].events
         options_to_notify = options.copy()
         options.clear()
-        if program_key is not ProgramKey.UNKNOWN:
-            options.update(await self.get_options_definitions(ha_id, program_key))
+        options.update(await self.get_options_definitions(ha_id, program_key))

         for option in options.values():
             option_value = option.constraints.default if option.constraints else None
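The event-listener hunk replaces a fixed 30-second reconnect delay with exponential backoff. A stripped-down sketch of the loop, with `stream_events` and `handle` as hypothetical stand-ins for the Home Connect client calls: the delay starts at 10 seconds, resets whenever a message arrives, and doubles after each failure up to a one-hour cap.

```python
import asyncio


async def listen(stream_events, handle) -> None:
    retry_time = 10
    while True:
        try:
            async for event in stream_events():
                retry_time = 10  # healthy stream: reset the backoff
                handle(event)
        except ConnectionError:
            await asyncio.sleep(retry_time)
            retry_time = min(retry_time * 2, 3600)  # cap at one hour
```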
@@ -8,6 +8,7 @@ from typing import cast
 from aiohomeconnect.model import EventKey, OptionKey
 from aiohomeconnect.model.error import ActiveProgramNotSetError, HomeConnectError

+from homeassistant.const import STATE_UNAVAILABLE
 from homeassistant.core import callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo

@@ -51,8 +52,10 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):
     def _handle_coordinator_update(self) -> None:
         """Handle updated data from the coordinator."""
         self.update_native_value()
+        available = self._attr_available = self.appliance.info.connected
         self.async_write_ha_state()
-        _LOGGER.debug("Updated %s, new state: %s", self.entity_id, self.state)
+        state = STATE_UNAVAILABLE if not available else self.state
+        _LOGGER.debug("Updated %s, new state: %s", self.entity_id, state)

     @property
     def bsh_key(self) -> str:

@@ -61,10 +64,13 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]):

     @property
     def available(self) -> bool:
-        """Return True if entity is available."""
-        return (
-            self.appliance.info.connected and self._attr_available and super().available
-        )
+        """Return True if entity is available.
+
+        Do not use self.last_update_success for available state
+        as event updates should take precedence over the coordinator
+        refresh.
+        """
+        return self._attr_available


 class HomeConnectOptionEntity(HomeConnectEntity):
@@ -7,6 +7,6 @@
   "documentation": "https://www.home-assistant.io/integrations/home_connect",
   "iot_class": "cloud_push",
   "loggers": ["aiohomeconnect"],
-  "requirements": ["aiohomeconnect==0.15.0"],
+  "requirements": ["aiohomeconnect==0.16.2"],
   "single_config_entry": true
 }
@@ -11,7 +11,6 @@ from homeassistant.components.number import (
     NumberEntity,
     NumberEntityDescription,
 )
-from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -23,6 +22,7 @@ from .const import (
     SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID,
     SVE_TRANSLATION_PLACEHOLDER_KEY,
     SVE_TRANSLATION_PLACEHOLDER_VALUE,
+    UNIT_MAP,
 )
 from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
 from .entity import HomeConnectEntity, HomeConnectOptionEntity

@@ -32,13 +32,6 @@ _LOGGER = logging.getLogger(__name__)

 PARALLEL_UPDATES = 1

-UNIT_MAP = {
-    "seconds": UnitOfTime.SECONDS,
-    "ml": UnitOfVolume.MILLILITERS,
-    "°C": UnitOfTemperature.CELSIUS,
-    "°F": UnitOfTemperature.FAHRENHEIT,
-}
-
 NUMBERS = (
     NumberEntityDescription(
         key=SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR,
@@ -1,10 +1,12 @@
 """Provides a sensor for Home Connect."""

+import contextlib
 from dataclasses import dataclass
 from datetime import timedelta
 from typing import cast

 from aiohomeconnect.model import EventKey, StatusKey
+from aiohomeconnect.model.error import HomeConnectError

 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -23,6 +25,7 @@ from .const import (
     BSH_OPERATION_STATE_FINISHED,
     BSH_OPERATION_STATE_PAUSE,
     BSH_OPERATION_STATE_RUN,
+    UNIT_MAP,
 )
 from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
 from .entity import HomeConnectEntity
@@ -40,6 +43,7 @@ class HomeConnectSensorEntityDescription(

     default_value: str | None = None
     appliance_types: tuple[str, ...] | None = None
+    fetch_unit: bool = False


 BSH_PROGRAM_SENSORS = (
@@ -183,7 +187,8 @@ SENSORS = (
         key=StatusKey.COOKING_OVEN_CURRENT_CAVITY_TEMPERATURE,
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
-        translation_key="current_cavity_temperature",
+        translation_key="oven_current_cavity_temperature",
+        fetch_unit=True,
     ),
 )

@@ -318,6 +323,29 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
             case _:
                 self._attr_native_value = status

+    async def async_added_to_hass(self) -> None:
+        """When entity is added to hass."""
+        await super().async_added_to_hass()
+        if self.entity_description.fetch_unit:
+            data = self.appliance.status[cast(StatusKey, self.bsh_key)]
+            if data.unit:
+                self._attr_native_unit_of_measurement = UNIT_MAP.get(
+                    data.unit, data.unit
+                )
+            else:
+                await self.fetch_unit()
+
+    async def fetch_unit(self) -> None:
+        """Fetch the unit of measurement."""
+        with contextlib.suppress(HomeConnectError):
+            data = await self.coordinator.client.get_status_value(
+                self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key)
+            )
+            if data.unit:
+                self._attr_native_unit_of_measurement = UNIT_MAP.get(
+                    data.unit, data.unit
+                )


 class HomeConnectProgramSensor(HomeConnectSensor):
     """Sensor class for Home Connect sensors that reports information related to the running program."""

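`UNIT_MAP` (now shared from `const.py`) translates the unit strings the appliance reports into Home Assistant units and falls back to the raw string for anything unmapped, which is what the `UNIT_MAP.get(data.unit, data.unit)` calls above do. A small sketch of that fallback, with illustrative stand-in values instead of the HA unit constants:

    UNIT_MAP = {"seconds": "s", "ml": "mL", "°C": "°C", "°F": "°F"}

    def to_ha_unit(device_unit: str | None) -> str | None:
        # Unknown units pass through unchanged instead of being dropped.
        if device_unit is None:
            return None
        return UNIT_MAP.get(device_unit, device_unit)

    assert to_ha_unit("seconds") == "s"
    assert to_ha_unit("bar") == "bar"  # unmapped: returned as-is
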
@@ -1529,8 +1529,8 @@
                 "map3": "Map 3"
             }
         },
-        "current_cavity_temperature": {
-            "name": "Current cavity temperature"
+        "oven_current_cavity_temperature": {
+            "name": "Current oven cavity temperature"
         },
         "freezer_door_alarm": {
             "name": "Freezer door alarm",

@@ -437,18 +437,21 @@ def ws_expose_entity(
 def ws_list_exposed_entities(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
 ) -> None:
-    """Expose an entity to an assistant."""
+    """List entities which are exposed to assistants."""
     result: dict[str, Any] = {}

     exposed_entities = hass.data[DATA_EXPOSED_ENTITIES]
     entity_registry = er.async_get(hass)
     for entity_id in chain(exposed_entities.entities, entity_registry.entities):
-        result[entity_id] = {}
+        exposed_to = {}
         entity_settings = async_get_entity_settings(hass, entity_id)
         for assistant, settings in entity_settings.items():
-            if "should_expose" not in settings:
+            if "should_expose" not in settings or not settings["should_expose"]:
                 continue
-            result[entity_id][assistant] = settings["should_expose"]
+            exposed_to[assistant] = True
+        if not exposed_to:
+            continue
+        result[entity_id] = exposed_to
     connection.send_result(msg["id"], {"exposed_entities": result})

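With this change an entity is listed only when at least one assistant actually exposes it, and each listed assistant maps to `True`. A sketch of the reshaped payload (entity IDs and assistant names are invented for illustration):

    exposed_entities = {
        "light.kitchen": {"conversation": True, "cloud.alexa": True},
        # An entity exposed to no assistant is now omitted entirely,
        # where the old code would have sent e.g. "sensor.debug": {}.
    }
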
@@ -1,6 +1,12 @@
 {
     "entity": {
         "sensor": {
+            "brightness": {
+                "default": "mdi:brightness-5"
+            },
+            "brightness_instance": {
+                "default": "mdi:brightness-5"
+            },
             "link_quality": {
                 "default": "mdi:signal"
             },
@@ -9,7 +15,7 @@
             }
         },
         "switch": {
-            "watchdog_on_off": {
+            "watchdog": {
                 "default": "mdi:dog"
             },
             "manual_operation": {

@@ -40,10 +40,22 @@ def get_window_value(attribute: HomeeAttribute) -> str | None:
     return vals.get(attribute.current_value)


+def get_brightness_device_class(
+    attribute: HomeeAttribute, device_class: SensorDeviceClass | None
+) -> SensorDeviceClass | None:
+    """Return the device class for a brightness sensor."""
+    if attribute.unit == "%":
+        return None
+    return device_class
+
+
 @dataclass(frozen=True, kw_only=True)
 class HomeeSensorEntityDescription(SensorEntityDescription):
     """A class that describes Homee sensor entities."""

+    device_class_fn: Callable[
+        [HomeeAttribute, SensorDeviceClass | None], SensorDeviceClass | None
+    ] = lambda attribute, device_class: device_class
     value_fn: Callable[[HomeeAttribute], str | float | None] = (
         lambda value: value.current_value
     )
@@ -67,6 +79,7 @@ SENSOR_DESCRIPTIONS: dict[AttributeType, HomeeSensorEntityDescription] = {
     AttributeType.BRIGHTNESS: HomeeSensorEntityDescription(
         key="brightness",
         device_class=SensorDeviceClass.ILLUMINANCE,
+        device_class_fn=get_brightness_device_class,
         state_class=SensorStateClass.MEASUREMENT,
         value_fn=(
             lambda attribute: attribute.current_value * 1000
@@ -303,6 +316,9 @@ class HomeeSensor(HomeeEntity, SensorEntity):
         if attribute.instance > 0:
             self._attr_translation_key = f"{self._attr_translation_key}_instance"
             self._attr_translation_placeholders = {"instance": str(attribute.instance)}
+        self._attr_device_class = description.device_class_fn(
+            attribute, description.device_class
+        )

     @property
     def native_value(self) -> float | str | None:

@@ -111,6 +111,9 @@
             }
         },
         "sensor": {
+            "brightness": {
+                "name": "Illuminance"
+            },
             "brightness_instance": {
                 "name": "Illuminance {instance}"
             },

@@ -154,7 +154,6 @@ class HKDevice:
         self._pending_subscribes: set[tuple[int, int]] = set()
         self._subscribe_timer: CALLBACK_TYPE | None = None
         self._load_platforms_lock = asyncio.Lock()
-        self._full_update_requested: bool = False

     @property
     def entity_map(self) -> Accessories:
@@ -841,48 +840,11 @@ class HKDevice:

     async def async_request_update(self, now: datetime | None = None) -> None:
         """Request an debounced update from the accessory."""
-        self._full_update_requested = True
         await self._debounced_update.async_call()

     async def async_update(self, now: datetime | None = None) -> None:
         """Poll state of all entities attached to this bridge/accessory."""
         to_poll = self.pollable_characteristics
-        accessories = self.entity_map.accessories
-
-        if (
-            not self._full_update_requested
-            and len(accessories) == 1
-            and self.available
-            and not (to_poll - self.watchable_characteristics)
-            and self.pairing.is_available
-            and await self.pairing.controller.async_reachable(
-                self.unique_id, timeout=5.0
-            )
-        ):
-            # If its a single accessory and all chars are watchable,
-            # only poll the firmware version to keep the connection alive
-            # https://github.com/home-assistant/core/issues/123412
-            #
-            # Firmware revision is used here since iOS does this to keep camera
-            # connections alive, and the goal is to not regress
-            # https://github.com/home-assistant/core/issues/116143
-            # by polling characteristics that are not normally polled frequently
-            # and may not be tested by the device vendor.
-            #
-            _LOGGER.debug(
-                "Accessory is reachable, limiting poll to firmware version: %s",
-                self.unique_id,
-            )
-            first_accessory = accessories[0]
-            accessory_info = first_accessory.services.first(
-                service_type=ServicesTypes.ACCESSORY_INFORMATION
-            )
-            assert accessory_info is not None
-            firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid
-            to_poll = {(first_accessory.aid, firmware_iid)}
-
-        self._full_update_requested = False

         if not to_poll:
             self.async_update_available_state()
             _LOGGER.debug(

@@ -14,6 +14,6 @@
     "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
     "iot_class": "local_push",
     "loggers": ["aiohomekit", "commentjson"],
-    "requirements": ["aiohomekit==3.2.7"],
+    "requirements": ["aiohomekit==3.2.8"],
     "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }

@@ -94,7 +94,12 @@ async def async_setup_devices(bridge: HueBridge):
         add_device(hue_resource)

     # create/update all current devices found in controllers
-    known_devices = [add_device(hue_device) for hue_device in dev_controller]
+    # sort the devices to ensure bridges are added first
+    hue_devices = list(dev_controller)
+    hue_devices.sort(
+        key=lambda dev: dev.metadata.archetype != DeviceArchetypes.BRIDGE_V2
+    )
+    known_devices = [add_device(hue_device) for hue_device in hue_devices]
     known_devices += [add_device(hue_room) for hue_room in api.groups.room]
     known_devices += [add_device(hue_zone) for hue_zone in api.groups.zone]

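The sort key relies on `False < True`: devices for which the predicate is `False` (the bridges) sort first, and Python's stable sort keeps the remaining order intact. The same trick demonstrated on plain strings:

    devices = ["lamp", "bridge", "sensor"]
    # "is not a bridge" is False (0) for the bridge, True (1) for the rest.
    devices.sort(key=lambda name: name != "bridge")
    assert devices == ["bridge", "lamp", "sensor"]
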
@@ -280,7 +280,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
         if self.custom_event_template is not None:
             try:
                 data["custom"] = self.custom_event_template.async_render(
-                    data, parse_result=True
+                    data | {"text": message.text}, parse_result=True
                 )
                 _LOGGER.debug(
                     "IMAP custom template (%s) for msguid %s (%s) rendered to: %s, initial: %s",

@@ -28,5 +28,5 @@
     "dependencies": ["bluetooth_adapters"],
     "documentation": "https://www.home-assistant.io/integrations/inkbird",
     "iot_class": "local_push",
-    "requirements": ["inkbird-ble==0.7.0"]
+    "requirements": ["inkbird-ble==0.7.1"]
 }

@@ -9,6 +9,7 @@ from aiohttp import web
 import voluptuous as vol

 from homeassistant.components import http
+from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
 from homeassistant.components.cover import (
     ATTR_POSITION,
     DOMAIN as COVER_DOMAIN,
@@ -140,6 +141,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     intent.async_register(hass, GetCurrentDateIntentHandler())
     intent.async_register(hass, GetCurrentTimeIntentHandler())
     intent.async_register(hass, RespondIntentHandler())
+    intent.async_register(hass, GetTemperatureIntent())

     return True

@@ -444,6 +446,48 @@ class RespondIntentHandler(intent.IntentHandler):
         return response


+class GetTemperatureIntent(intent.IntentHandler):
+    """Handle GetTemperature intents."""
+
+    intent_type = intent.INTENT_GET_TEMPERATURE
+    description = "Gets the current temperature of a climate device or entity"
+    slot_schema = {
+        vol.Optional("area"): intent.non_empty_string,
+        vol.Optional("name"): intent.non_empty_string,
+    }
+    platforms = {CLIMATE_DOMAIN}
+
+    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
+        """Handle the intent."""
+        hass = intent_obj.hass
+        slots = self.async_validate_slots(intent_obj.slots)
+
+        name: str | None = None
+        if "name" in slots:
+            name = slots["name"]["value"]
+
+        area: str | None = None
+        if "area" in slots:
+            area = slots["area"]["value"]
+
+        match_constraints = intent.MatchTargetsConstraints(
+            name=name,
+            area_name=area,
+            domains=[CLIMATE_DOMAIN],
+            assistant=intent_obj.assistant,
+        )
+        match_result = intent.async_match_targets(hass, match_constraints)
+        if not match_result.is_match:
+            raise intent.MatchFailedError(
+                result=match_result, constraints=match_constraints
+            )
+
+        response = intent_obj.create_response()
+        response.response_type = intent.IntentResponseType.QUERY_ANSWER
+        response.async_set_states(matched_states=match_result.states)
+        return response
+
+
 async def _async_process_intent(
     hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol
 ) -> None:

@@ -11,7 +11,6 @@
     },
     "config_subentries": {
         "entity": {
-            "title": "Add entity",
             "step": {
                 "add_sensor": {
                     "description": "Configure the new sensor",
@@ -27,7 +26,12 @@
                     "state": "Initial state"
                 }
             }
-        }
+        },
+        "initiate_flow": {
+            "user": "Add sensor",
+            "reconfigure": "Reconfigure sensor"
+        },
+        "entry_type": "Sensor"
     }
 },
 "options": {

@@ -110,7 +110,9 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
         self._attr_hvac_modes = [HVACMode.OFF]
         self._attr_hvac_mode = HVACMode.OFF
         self._attr_preset_modes = []
-        self._attr_temperature_unit = UnitOfTemperature.CELSIUS
+        self._attr_temperature_unit = (
+            self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
+        )
         self._requested_hvac_mode: str | None = None

         # Set up HVAC modes.
@@ -182,6 +184,11 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity):
         self._attr_target_temperature_high = self.data.target_temp_high
         self._attr_target_temperature_low = self.data.target_temp_low

+        # Update unit.
+        self._attr_temperature_unit = (
+            self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
+        )
+
         _LOGGER.debug(
             "[%s:%s] update status: c:%s, t:%s, l:%s, h:%s, hvac:%s, unit:%s, step:%s",
             self.coordinator.device_name,

@@ -3,6 +3,8 @@
 from datetime import timedelta
 from typing import Final

+from homeassistant.const import UnitOfTemperature
+
 # Config flow
 DOMAIN = "lg_thinq"
 COMPANY = "LGE"
@@ -18,3 +20,10 @@ MQTT_SUBSCRIPTION_INTERVAL: Final = timedelta(days=1)
 # MQTT: Message types
 DEVICE_PUSH_MESSAGE: Final = "DEVICE_PUSH"
 DEVICE_STATUS_MESSAGE: Final = "DEVICE_STATUS"
+
+# Unit conversion map
+DEVICE_UNIT_TO_HA: dict[str, str] = {
+    "F": UnitOfTemperature.FAHRENHEIT,
+    "C": UnitOfTemperature.CELSIUS,
+}
+REVERSE_DEVICE_UNIT_TO_HA = {v: k for k, v in DEVICE_UNIT_TO_HA.items()}

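Because `DEVICE_UNIT_TO_HA` is one-to-one, inverting it with a dict comprehension gives a safe reverse lookup, and `.get()` returns `None` for units neither table knows. A round trip, with plain strings standing in for the HA unit constants:

    DEVICE_UNIT_TO_HA = {"F": "°F", "C": "°C"}
    REVERSE_DEVICE_UNIT_TO_HA = {v: k for k, v in DEVICE_UNIT_TO_HA.items()}

    assert DEVICE_UNIT_TO_HA["C"] == "°C"
    assert REVERSE_DEVICE_UNIT_TO_HA["°C"] == "C"
    assert REVERSE_DEVICE_UNIT_TO_HA.get("K") is None  # unknown unit
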
@@ -2,19 +2,21 @@

 from __future__ import annotations

+from collections.abc import Mapping
 import logging
 from typing import TYPE_CHECKING, Any

 from thinqconnect import ThinQAPIException
 from thinqconnect.integration import HABridge

-from homeassistant.core import HomeAssistant
+from homeassistant.const import EVENT_CORE_CONFIG_UPDATE
+from homeassistant.core import Event, HomeAssistant, callback
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 if TYPE_CHECKING:
     from . import ThinqConfigEntry

-from .const import DOMAIN
+from .const import DOMAIN, REVERSE_DEVICE_UNIT_TO_HA

 _LOGGER = logging.getLogger(__name__)

@@ -54,6 +56,40 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             f"{self.device_id}_{self.sub_id}" if self.sub_id else self.device_id
         )

+        # Set your preferred temperature unit. This will allow us to retrieve
+        # temperature values from the API in a converted value corresponding to
+        # preferred unit.
+        self._update_preferred_temperature_unit()
+
+        # Add a callback to handle core config update.
+        self.unit_system: str | None = None
+        self.hass.bus.async_listen(
+            event_type=EVENT_CORE_CONFIG_UPDATE,
+            listener=self._handle_update_config,
+            event_filter=self.async_config_update_filter,
+        )
+
+    async def _handle_update_config(self, _: Event) -> None:
+        """Handle update core config."""
+        self._update_preferred_temperature_unit()
+
+        await self.async_refresh()
+
+    @callback
+    def async_config_update_filter(self, event_data: Mapping[str, Any]) -> bool:
+        """Filter out unwanted events."""
+        if (unit_system := event_data.get("unit_system")) != self.unit_system:
+            self.unit_system = unit_system
+            return True
+
+        return False
+
+    def _update_preferred_temperature_unit(self) -> None:
+        """Update preferred temperature unit."""
+        self.api.set_preferred_temperature_unit(
+            REVERSE_DEVICE_UNIT_TO_HA.get(self.hass.config.units.temperature_unit)
+        )
+
     async def _async_update_data(self) -> dict[str, Any]:
         """Request to the server to update the status from full response data."""
         try:

@@ -10,25 +10,19 @@ from thinqconnect import ThinQAPIException
 from thinqconnect.devices.const import Location
 from thinqconnect.integration import PropertyState

-from homeassistant.const import UnitOfTemperature
 from homeassistant.core import callback
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.entity import EntityDescription
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import COMPANY, DOMAIN
+from .const import COMPANY, DEVICE_UNIT_TO_HA, DOMAIN
 from .coordinator import DeviceDataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

 EMPTY_STATE = PropertyState()

-UNIT_CONVERSION_MAP: dict[str, str] = {
-    "F": UnitOfTemperature.FAHRENHEIT,
-    "C": UnitOfTemperature.CELSIUS,
-}
-

 class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
     """The base implementation of all lg thinq entities."""
@@ -75,7 +69,7 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]):
         if unit is None:
             return None

-        return UNIT_CONVERSION_MAP.get(unit)
+        return DEVICE_UNIT_TO_HA.get(unit)

     def _update_status(self) -> None:
         """Update status itself.

@@ -66,7 +66,7 @@
         }
     },
     "set_state": {
-        "name": "Set State",
+        "name": "Set state",
         "description": "Sets a color/brightness and possibly turn the light on/off.",
         "fields": {
             "infrared": {
@@ -209,11 +209,11 @@
         },
         "palette": {
             "name": "Palette",
-            "description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to use for this effect. Overrides the theme attribute."
+            "description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to use for this effect. Overrides the 'Theme' attribute."
         },
         "theme": {
             "name": "[%key:component::lifx::entity::select::theme::name%]",
-            "description": "Predefined color theme to use for the effect. Overridden by the palette attribute."
+            "description": "Predefined color theme to use for the effect. Overridden by the 'Palette' attribute."
         },
         "power_on": {
             "name": "Power on",
@@ -243,7 +243,7 @@
         },
         "palette": {
             "name": "Palette",
-            "description": "List of 1 to 6 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to use for this effect."
+            "description": "List of 1 to 6 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to use for this effect."
         },
         "power_on": {
             "name": "Power on",
@@ -256,16 +256,16 @@
             "description": "Stops a running effect."
         },
         "paint_theme": {
-            "name": "Paint Theme",
-            "description": "Paint either a provided theme or custom palette across one or more LIFX lights.",
+            "name": "Paint theme",
+            "description": "Paints either a provided theme or custom palette across one or more LIFX lights.",
             "fields": {
                 "palette": {
                     "name": "Palette",
-                    "description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to paint across the target lights. Overrides the theme attribute."
+                    "description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to paint across the target lights. Overrides the 'Theme' attribute."
                 },
                 "theme": {
                     "name": "[%key:component::lifx::entity::select::theme::name%]",
-                    "description": "Predefined color theme to paint. Overridden by the palette attribute."
+                    "description": "Predefined color theme to paint. Overridden by the 'Palette' attribute."
                 },
                 "transition": {
                     "name": "Transition",

@@ -1,6 +1,6 @@
 {
     "domain": "matter",
-    "name": "Matter (BETA)",
+    "name": "Matter",
     "after_dependencies": ["hassio"],
     "codeowners": ["@home-assistant/matter"],
     "config_flow": true,

@@ -8,6 +8,6 @@
     "iot_class": "calculated",
     "loggers": ["yt_dlp"],
     "quality_scale": "internal",
-    "requirements": ["yt-dlp[default]==2025.01.26"],
+    "requirements": ["yt-dlp[default]==2025.02.19"],
     "single_config_entry": true
 }

@@ -218,10 +218,16 @@ ABBREVIATIONS = {
     "sup_vol": "support_volume_set",
     "sup_feat": "supported_features",
     "sup_clrm": "supported_color_modes",
+    "swing_h_mode_cmd_tpl": "swing_horizontal_mode_command_template",
+    "swing_h_mode_cmd_t": "swing_horizontal_mode_command_topic",
+    "swing_h_mode_stat_tpl": "swing_horizontal_mode_state_template",
+    "swing_h_mode_stat_t": "swing_horizontal_mode_state_topic",
+    "swing_h_modes": "swing_horizontal_modes",
     "swing_mode_cmd_tpl": "swing_mode_command_template",
     "swing_mode_cmd_t": "swing_mode_command_topic",
     "swing_mode_stat_tpl": "swing_mode_state_template",
     "swing_mode_stat_t": "swing_mode_state_topic",
     "swing_modes": "swing_modes",
     "temp_cmd_tpl": "temperature_command_template",
     "temp_cmd_t": "temperature_command_topic",
     "temp_hi_cmd_tpl": "temperature_high_command_template",

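The abbreviation table lets MQTT discovery payloads stay short; each short key expands to its long form before the config schema runs. A hypothetical payload fragment showing the expansion (topic names are invented):

    ABBREVIATIONS = {
        "swing_h_modes": "swing_horizontal_modes",
        "swing_h_mode_cmd_t": "swing_horizontal_mode_command_topic",
        "swing_h_mode_stat_t": "swing_horizontal_mode_state_topic",
    }

    payload = {
        "swing_h_modes": ["on", "off"],
        "swing_h_mode_cmd_t": "home/ac/swing-h/set",
        "swing_h_mode_stat_t": "home/ac/swing-h/state",
    }
    # Unknown keys pass through unchanged.
    expanded = {ABBREVIATIONS.get(key, key): value for key, value in payload.items()}
    assert "swing_horizontal_mode_command_topic" in expanded
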
@@ -113,11 +113,19 @@ CONF_PRESET_MODE_COMMAND_TOPIC = "preset_mode_command_topic"
 CONF_PRESET_MODE_VALUE_TEMPLATE = "preset_mode_value_template"
 CONF_PRESET_MODE_COMMAND_TEMPLATE = "preset_mode_command_template"
 CONF_PRESET_MODES_LIST = "preset_modes"

+CONF_SWING_HORIZONTAL_MODE_COMMAND_TEMPLATE = "swing_horizontal_mode_command_template"
+CONF_SWING_HORIZONTAL_MODE_COMMAND_TOPIC = "swing_horizontal_mode_command_topic"
+CONF_SWING_HORIZONTAL_MODE_LIST = "swing_horizontal_modes"
+CONF_SWING_HORIZONTAL_MODE_STATE_TEMPLATE = "swing_horizontal_mode_state_template"
+CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC = "swing_horizontal_mode_state_topic"
+
 CONF_SWING_MODE_COMMAND_TEMPLATE = "swing_mode_command_template"
 CONF_SWING_MODE_COMMAND_TOPIC = "swing_mode_command_topic"
 CONF_SWING_MODE_LIST = "swing_modes"
 CONF_SWING_MODE_STATE_TEMPLATE = "swing_mode_state_template"
 CONF_SWING_MODE_STATE_TOPIC = "swing_mode_state_topic"

 CONF_TEMP_HIGH_COMMAND_TEMPLATE = "temperature_high_command_template"
 CONF_TEMP_HIGH_COMMAND_TOPIC = "temperature_high_command_topic"
 CONF_TEMP_HIGH_STATE_TEMPLATE = "temperature_high_state_template"
@@ -145,6 +153,8 @@ MQTT_CLIMATE_ATTRIBUTES_BLOCKED = frozenset(
     climate.ATTR_MIN_TEMP,
     climate.ATTR_PRESET_MODE,
     climate.ATTR_PRESET_MODES,
+    climate.ATTR_SWING_HORIZONTAL_MODE,
+    climate.ATTR_SWING_HORIZONTAL_MODES,
     climate.ATTR_SWING_MODE,
     climate.ATTR_SWING_MODES,
     climate.ATTR_TARGET_TEMP_HIGH,
@@ -162,6 +172,7 @@ VALUE_TEMPLATE_KEYS = (
     CONF_MODE_STATE_TEMPLATE,
     CONF_ACTION_TEMPLATE,
     CONF_PRESET_MODE_VALUE_TEMPLATE,
+    CONF_SWING_HORIZONTAL_MODE_STATE_TEMPLATE,
     CONF_SWING_MODE_STATE_TEMPLATE,
     CONF_TEMP_HIGH_STATE_TEMPLATE,
     CONF_TEMP_LOW_STATE_TEMPLATE,
@@ -174,6 +185,7 @@ COMMAND_TEMPLATE_KEYS = {
     CONF_MODE_COMMAND_TEMPLATE,
     CONF_POWER_COMMAND_TEMPLATE,
     CONF_PRESET_MODE_COMMAND_TEMPLATE,
+    CONF_SWING_HORIZONTAL_MODE_COMMAND_TEMPLATE,
     CONF_SWING_MODE_COMMAND_TEMPLATE,
     CONF_TEMP_COMMAND_TEMPLATE,
     CONF_TEMP_HIGH_COMMAND_TEMPLATE,
@@ -194,6 +206,8 @@ TOPIC_KEYS = (
     CONF_POWER_COMMAND_TOPIC,
     CONF_PRESET_MODE_COMMAND_TOPIC,
     CONF_PRESET_MODE_STATE_TOPIC,
+    CONF_SWING_HORIZONTAL_MODE_COMMAND_TOPIC,
+    CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC,
     CONF_SWING_MODE_COMMAND_TOPIC,
     CONF_SWING_MODE_STATE_TOPIC,
     CONF_TEMP_COMMAND_TOPIC,
@@ -302,6 +316,13 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
         vol.Optional(CONF_PRESET_MODE_COMMAND_TEMPLATE): cv.template,
         vol.Optional(CONF_PRESET_MODE_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_PRESET_MODE_VALUE_TEMPLATE): cv.template,
+        vol.Optional(CONF_SWING_HORIZONTAL_MODE_COMMAND_TEMPLATE): cv.template,
+        vol.Optional(CONF_SWING_HORIZONTAL_MODE_COMMAND_TOPIC): valid_publish_topic,
+        vol.Optional(
+            CONF_SWING_HORIZONTAL_MODE_LIST, default=[SWING_ON, SWING_OFF]
+        ): cv.ensure_list,
+        vol.Optional(CONF_SWING_HORIZONTAL_MODE_STATE_TEMPLATE): cv.template,
+        vol.Optional(CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC): valid_subscribe_topic,
         vol.Optional(CONF_SWING_MODE_COMMAND_TEMPLATE): cv.template,
         vol.Optional(CONF_SWING_MODE_COMMAND_TOPIC): valid_publish_topic,
         vol.Optional(
@@ -515,6 +536,7 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):

     _attr_fan_mode: str | None = None
     _attr_hvac_mode: HVACMode | None = None
+    _attr_swing_horizontal_mode: str | None = None
     _attr_swing_mode: str | None = None
     _default_name = DEFAULT_NAME
     _entity_id_format = climate.ENTITY_ID_FORMAT
@@ -543,6 +565,7 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):
         if (precision := config.get(CONF_PRECISION)) is not None:
             self._attr_precision = precision
         self._attr_fan_modes = config[CONF_FAN_MODE_LIST]
+        self._attr_swing_horizontal_modes = config[CONF_SWING_HORIZONTAL_MODE_LIST]
         self._attr_swing_modes = config[CONF_SWING_MODE_LIST]
         self._attr_target_temperature_step = config[CONF_TEMP_STEP]

@@ -568,6 +591,11 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):

         if self._topic[CONF_FAN_MODE_STATE_TOPIC] is None or self._optimistic:
             self._attr_fan_mode = FAN_LOW
+        if (
+            self._topic[CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC] is None
+            or self._optimistic
+        ):
+            self._attr_swing_horizontal_mode = SWING_OFF
         if self._topic[CONF_SWING_MODE_STATE_TOPIC] is None or self._optimistic:
             self._attr_swing_mode = SWING_OFF
         if self._topic[CONF_MODE_STATE_TOPIC] is None or self._optimistic:
@@ -629,6 +657,11 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):
         ):
             support |= ClimateEntityFeature.FAN_MODE

+        if (self._topic[CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC] is not None) or (
+            self._topic[CONF_SWING_HORIZONTAL_MODE_COMMAND_TOPIC] is not None
+        ):
+            support |= ClimateEntityFeature.SWING_HORIZONTAL_MODE
+
         if (self._topic[CONF_SWING_MODE_STATE_TOPIC] is not None) or (
             self._topic[CONF_SWING_MODE_COMMAND_TOPIC] is not None
         ):
@@ -744,6 +777,16 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):
             ),
             {"_attr_fan_mode"},
         )
+        self.add_subscription(
+            CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC,
+            partial(
+                self._handle_mode_received,
+                CONF_SWING_HORIZONTAL_MODE_STATE_TEMPLATE,
+                "_attr_swing_horizontal_mode",
+                CONF_SWING_HORIZONTAL_MODE_LIST,
+            ),
+            {"_attr_swing_horizontal_mode"},
+        )
         self.add_subscription(
             CONF_SWING_MODE_STATE_TOPIC,
             partial(
@@ -782,6 +825,20 @@ class MqttClimate(MqttTemperatureControlEntity, ClimateEntity):

         self.async_write_ha_state()

+    async def async_set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None:
+        """Set new swing horizontal mode."""
+        payload = self._command_templates[CONF_SWING_HORIZONTAL_MODE_COMMAND_TEMPLATE](
+            swing_horizontal_mode
+        )
+        await self._publish(CONF_SWING_HORIZONTAL_MODE_COMMAND_TOPIC, payload)
+
+        if (
+            self._optimistic
+            or self._topic[CONF_SWING_HORIZONTAL_MODE_STATE_TOPIC] is None
+        ):
+            self._attr_swing_horizontal_mode = swing_horizontal_mode
+            self.async_write_ha_state()
+
     async def async_set_swing_mode(self, swing_mode: str) -> None:
         """Set new swing mode."""
         payload = self._command_templates[CONF_SWING_MODE_COMMAND_TEMPLATE](swing_mode)

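As with the other climate features, horizontal swing support is advertised whenever either its state or its command topic is configured. The same derivation in isolation, with a stand-in feature flag (values and topic names illustrative):

    from enum import IntFlag

    class Feature(IntFlag):
        SWING_MODE = 1
        SWING_HORIZONTAL_MODE = 2

    config = {"swing_horizontal_mode_command_topic": "home/ac/swing-h/set"}
    support = Feature(0)
    # Either topic being present is enough to expose the feature.
    if config.get("swing_horizontal_mode_state_topic") or config.get(
        "swing_horizontal_mode_command_topic"
    ):
        support |= Feature.SWING_HORIZONTAL_MODE
    assert support & Feature.SWING_HORIZONTAL_MODE
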
@@ -217,6 +217,10 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
                 self._attr_color_mode = next(iter(self.supported_color_modes))
             else:
                 self._attr_color_mode = ColorMode.UNKNOWN
+        elif config.get(CONF_BRIGHTNESS):
+            # Brightness is supported and no supported_color_modes are set,
+            # so set brightness as the supported color mode.
+            self._attr_supported_color_modes = {ColorMode.BRIGHTNESS}

     def _update_color(self, values: dict[str, Any]) -> None:
         color_mode: str = values["color_mode"]

@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING
 from music_assistant_client import MusicAssistantClient
 from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion
 from music_assistant_models.enums import EventType
-from music_assistant_models.errors import MusicAssistantError
+from music_assistant_models.errors import ActionUnavailable, MusicAssistantError

 from homeassistant.config_entries import ConfigEntry, ConfigEntryState
 from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform
@@ -23,7 +23,7 @@ from homeassistant.helpers.issue_registry import (
     async_delete_issue,
 )

-from .actions import register_actions
+from .actions import get_music_assistant_client, register_actions
 from .const import DOMAIN, LOGGER

 if TYPE_CHECKING:
@@ -137,6 +137,18 @@ async def async_setup_entry(
         mass.subscribe(handle_player_removed, EventType.PLAYER_REMOVED)
     )

+    # check if any playerconfigs have been removed while we were disconnected
+    all_player_configs = await mass.config.get_player_configs()
+    player_ids = {player.player_id for player in all_player_configs}
+    dev_reg = dr.async_get(hass)
+    dev_entries = dr.async_entries_for_config_entry(dev_reg, entry.entry_id)
+    for device in dev_entries:
+        for identifier in device.identifiers:
+            if identifier[0] == DOMAIN and identifier[1] not in player_ids:
+                dev_reg.async_update_device(
+                    device.id, remove_config_entry_id=entry.entry_id
+                )
+
     return True


@@ -174,3 +186,31 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         await mass_entry_data.mass.disconnect()

     return unload_ok
+
+
+async def async_remove_config_entry_device(
+    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
+) -> bool:
+    """Remove a config entry from a device."""
+    player_id = next(
+        (
+            identifier[1]
+            for identifier in device_entry.identifiers
+            if identifier[0] == DOMAIN
+        ),
+        None,
+    )
+    if player_id is None:
+        # this should not be possible at all, but guard it anyways
+        return False
+    mass = get_music_assistant_client(hass, config_entry.entry_id)
+    if mass.players.get(player_id) is None:
+        # player is already removed on the server, this is an orphaned device
+        return True
+    # try to remove the player from the server
+    try:
+        await mass.config.remove_player_config(player_id)
+    except ActionUnavailable:
+        return False
+    else:
+        return True

@@ -23,6 +23,7 @@ from .const import (
     ATTR_ALBUM_TYPE,
     ATTR_ALBUMS,
     ATTR_ARTISTS,
+    ATTR_AUDIOBOOKS,
     ATTR_CONFIG_ENTRY_ID,
     ATTR_FAVORITE,
     ATTR_ITEMS,
@@ -32,6 +33,7 @@ from .const import (
     ATTR_OFFSET,
     ATTR_ORDER_BY,
     ATTR_PLAYLISTS,
+    ATTR_PODCASTS,
     ATTR_RADIO,
     ATTR_SEARCH,
     ATTR_SEARCH_ALBUM,
@@ -48,6 +50,15 @@ from .schemas import (

 if TYPE_CHECKING:
     from music_assistant_client import MusicAssistantClient
+    from music_assistant_models.media_items import (
+        Album,
+        Artist,
+        Audiobook,
+        Playlist,
+        Podcast,
+        Radio,
+        Track,
+    )

     from . import MusicAssistantConfigEntry

@@ -154,6 +165,14 @@ async def handle_search(call: ServiceCall) -> ServiceResponse:
                 media_item_dict_from_mass_item(mass, item)
                 for item in search_results.radio
             ],
+            ATTR_AUDIOBOOKS: [
+                media_item_dict_from_mass_item(mass, item)
+                for item in search_results.audiobooks
+            ],
+            ATTR_PODCASTS: [
+                media_item_dict_from_mass_item(mass, item)
+                for item in search_results.podcasts
+            ],
         }
     )
     return response
@@ -173,6 +192,15 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
         "offset": offset,
         "order_by": order_by,
     }
+    library_result: (
+        list[Album]
+        | list[Artist]
+        | list[Track]
+        | list[Radio]
+        | list[Playlist]
+        | list[Audiobook]
+        | list[Podcast]
+    )
     if media_type == MediaType.ALBUM:
         library_result = await mass.music.get_library_albums(
             **base_params,
@@ -181,7 +209,7 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
     elif media_type == MediaType.ARTIST:
         library_result = await mass.music.get_library_artists(
             **base_params,
-            album_artists_only=call.data.get(ATTR_ALBUM_ARTISTS_ONLY),
+            album_artists_only=bool(call.data.get(ATTR_ALBUM_ARTISTS_ONLY)),
         )
     elif media_type == MediaType.TRACK:
         library_result = await mass.music.get_library_tracks(
@@ -195,6 +223,14 @@ async def handle_get_library(call: ServiceCall) -> ServiceResponse:
         library_result = await mass.music.get_library_playlists(
             **base_params,
         )
+    elif media_type == MediaType.AUDIOBOOK:
+        library_result = await mass.music.get_library_audiobooks(
+            **base_params,
+        )
+    elif media_type == MediaType.PODCAST:
+        library_result = await mass.music.get_library_podcasts(
+            **base_params,
+        )
     else:
         raise ServiceValidationError(f"Unsupported media type {media_type}")

@@ -34,6 +34,8 @@ ATTR_ARTISTS = "artists"
 ATTR_ALBUMS = "albums"
 ATTR_TRACKS = "tracks"
 ATTR_PLAYLISTS = "playlists"
+ATTR_AUDIOBOOKS = "audiobooks"
+ATTR_PODCASTS = "podcasts"
 ATTR_RADIO = "radio"
 ATTR_ITEMS = "items"
 ATTR_RADIO_MODE = "radio_mode"

@@ -7,6 +7,6 @@
     "documentation": "https://www.home-assistant.io/integrations/music_assistant",
     "iot_class": "local_push",
     "loggers": ["music_assistant"],
-    "requirements": ["music-assistant-client==1.0.8"],
+    "requirements": ["music-assistant-client==1.1.1"],
     "zeroconf": ["_mass._tcp.local."]
 }

@@ -166,6 +166,8 @@ async def build_playlist_items_listing(
 ) -> BrowseMedia:
     """Build Playlist items browse listing."""
     playlist = await mass.music.get_item_by_uri(identifier)
+    if TYPE_CHECKING:
+        assert playlist.uri is not None

     return BrowseMedia(
         media_class=MediaClass.PLAYLIST,
@@ -219,6 +221,9 @@ async def build_artist_items_listing(
     artist = await mass.music.get_item_by_uri(identifier)
     albums = await mass.music.get_artist_albums(artist.item_id, artist.provider)

+    if TYPE_CHECKING:
+        assert artist.uri is not None
+
     return BrowseMedia(
         media_class=MediaType.ARTIST,
         media_content_id=artist.uri,
@@ -267,6 +272,9 @@ async def build_album_items_listing(
     album = await mass.music.get_item_by_uri(identifier)
     tracks = await mass.music.get_album_tracks(album.item_id, album.provider)

+    if TYPE_CHECKING:
+        assert album.uri is not None
+
     return BrowseMedia(
         media_class=MediaType.ALBUM,
         media_content_id=album.uri,
@@ -340,6 +348,9 @@ def build_item(
     title = item.name
     img_url = mass.get_media_item_image_url(item)

+    if TYPE_CHECKING:
+        assert item.uri is not None
+
     return BrowseMedia(
         media_class=media_class or item.media_type.value,
         media_content_id=item.uri,

@@ -9,6 +9,7 @@ import functools
 import os
 from typing import TYPE_CHECKING, Any, Concatenate

+from music_assistant_models.constants import PLAYER_CONTROL_NONE
 from music_assistant_models.enums import (
     EventType,
     MediaType,
@@ -20,6 +21,7 @@ from music_assistant_models.enums import (
 from music_assistant_models.errors import MediaNotFoundError, MusicAssistantError
 from music_assistant_models.event import MassEvent
 from music_assistant_models.media_items import ItemMapping, MediaItemType, Track
+from music_assistant_models.player_queue import PlayerQueue
 import voluptuous as vol

 from homeassistant.components import media_source
@@ -78,21 +80,15 @@ from .schemas import QUEUE_DETAILS_SCHEMA, queue_item_dict_from_mass_item
 if TYPE_CHECKING:
     from music_assistant_client import MusicAssistantClient
     from music_assistant_models.player import Player
-    from music_assistant_models.player_queue import PlayerQueue

-SUPPORTED_FEATURES = (
-    MediaPlayerEntityFeature.PAUSE
-    | MediaPlayerEntityFeature.VOLUME_SET
-    | MediaPlayerEntityFeature.STOP
+SUPPORTED_FEATURES_BASE = (
+    MediaPlayerEntityFeature.STOP
     | MediaPlayerEntityFeature.PREVIOUS_TRACK
     | MediaPlayerEntityFeature.NEXT_TRACK
     | MediaPlayerEntityFeature.SHUFFLE_SET
     | MediaPlayerEntityFeature.REPEAT_SET
-    | MediaPlayerEntityFeature.TURN_ON
-    | MediaPlayerEntityFeature.TURN_OFF
     | MediaPlayerEntityFeature.PLAY
     | MediaPlayerEntityFeature.PLAY_MEDIA
-    | MediaPlayerEntityFeature.VOLUME_STEP
     | MediaPlayerEntityFeature.CLEAR_PLAYLIST
     | MediaPlayerEntityFeature.BROWSE_MEDIA
    | MediaPlayerEntityFeature.MEDIA_ENQUEUE
@@ -212,11 +208,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
         """Initialize MediaPlayer entity."""
         super().__init__(mass, player_id)
         self._attr_icon = self.player.icon.replace("mdi-", "mdi:")
-        self._attr_supported_features = SUPPORTED_FEATURES
-        if PlayerFeature.SET_MEMBERS in self.player.supported_features:
-            self._attr_supported_features |= MediaPlayerEntityFeature.GROUPING
-        if PlayerFeature.VOLUME_MUTE in self.player.supported_features:
-            self._attr_supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE
+        self._set_supported_features()
         self._attr_device_class = MediaPlayerDeviceClass.SPEAKER
         self._prev_time: float = 0

@@ -241,6 +233,19 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
             )
         )

+        # we subscribe to the player config changed event to update
+        # the supported features of the player
+        async def player_config_changed(event: MassEvent) -> None:
+            self._set_supported_features()
+            await self.async_on_update()
+            self.async_write_ha_state()
+
+        self.async_on_remove(
+            self.mass.subscribe(
+                player_config_changed, EventType.PLAYER_CONFIG_UPDATED, self.player_id
+            )
+        )
+
     @property
     def active_queue(self) -> PlayerQueue | None:
         """Return the active queue for this player (if any)."""
@@ -473,6 +478,8 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
             album=album,
             media_type=MediaType(media_type) if media_type else None,
         ):
+            if TYPE_CHECKING:
+                assert item.uri is not None
             media_uris.append(item.uri)

         if not media_uris:
@@ -680,3 +687,20 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
         if isinstance(queue_option, MediaPlayerEnqueue):
             queue_option = QUEUE_OPTION_MAP.get(queue_option)
         return queue_option
+
+    def _set_supported_features(self) -> None:
+        """Set supported features based on player capabilities."""
+        supported_features = SUPPORTED_FEATURES_BASE
+        if PlayerFeature.SET_MEMBERS in self.player.supported_features:
+            supported_features |= MediaPlayerEntityFeature.GROUPING
+        if PlayerFeature.PAUSE in self.player.supported_features:
+            supported_features |= MediaPlayerEntityFeature.PAUSE
+        if self.player.mute_control != PLAYER_CONTROL_NONE:
+            supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE
+        if self.player.volume_control != PLAYER_CONTROL_NONE:
+            supported_features |= MediaPlayerEntityFeature.VOLUME_STEP
+            supported_features |= MediaPlayerEntityFeature.VOLUME_SET
+        if self.player.power_control != PLAYER_CONTROL_NONE:
+            supported_features |= MediaPlayerEntityFeature.TURN_ON
+            supported_features |= MediaPlayerEntityFeature.TURN_OFF
+        self._attr_supported_features = supported_features

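`_set_supported_features` recomputes the feature bitmask from the player's current capabilities instead of fixing it at init time, which is why the entity also resubscribes to `PLAYER_CONFIG_UPDATED` above. The pattern in isolation, with invented flag values standing in for `MediaPlayerEntityFeature`:

    from enum import IntFlag

    class MPFeature(IntFlag):
        STOP = 1
        PAUSE = 2
        VOLUME_SET = 4
        VOLUME_STEP = 8

    BASE = MPFeature.STOP

    def compute(volume_control: str, can_pause: bool) -> MPFeature:
        features = BASE
        if can_pause:
            features |= MPFeature.PAUSE
        if volume_control != "none":  # PLAYER_CONTROL_NONE stand-in
            features |= MPFeature.VOLUME_STEP | MPFeature.VOLUME_SET
        return features

    assert compute("internal", can_pause=True) & MPFeature.VOLUME_SET
    assert not compute("none", can_pause=False) & MPFeature.PAUSE
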
@@ -15,6 +15,7 @@ from .const import (
     ATTR_ALBUM,
     ATTR_ALBUMS,
     ATTR_ARTISTS,
+    ATTR_AUDIOBOOKS,
     ATTR_BIT_DEPTH,
     ATTR_CONTENT_TYPE,
     ATTR_CURRENT_INDEX,
@@ -31,6 +32,7 @@ from .const import (
     ATTR_OFFSET,
     ATTR_ORDER_BY,
     ATTR_PLAYLISTS,
+    ATTR_PODCASTS,
     ATTR_PROVIDER,
     ATTR_QUEUE_ID,
     ATTR_QUEUE_ITEM_ID,
@@ -65,20 +67,20 @@ MEDIA_ITEM_SCHEMA = vol.Schema(

 def media_item_dict_from_mass_item(
     mass: MusicAssistantClient,
-    item: MediaItemType | ItemMapping | None,
-) -> dict[str, Any] | None:
+    item: MediaItemType | ItemMapping,
+) -> dict[str, Any]:
     """Parse a Music Assistant MediaItem."""
-    if not item:
-        return None
-    base = {
+    base: dict[str, Any] = {
         ATTR_MEDIA_TYPE: item.media_type,
         ATTR_URI: item.uri,
         ATTR_NAME: item.name,
         ATTR_VERSION: item.version,
         ATTR_IMAGE: mass.get_media_item_image_url(item),
     }
+    artists: list[ItemMapping] | None
     if artists := getattr(item, "artists", None):
         base[ATTR_ARTISTS] = [media_item_dict_from_mass_item(mass, x) for x in artists]
+    album: ItemMapping | None
     if album := getattr(item, "album", None):
         base[ATTR_ALBUM] = media_item_dict_from_mass_item(mass, album)
     return base
@@ -101,6 +103,12 @@ SEARCH_RESULT_SCHEMA = vol.Schema(
         vol.Required(ATTR_RADIO): vol.All(
             cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
         ),
+        vol.Required(ATTR_AUDIOBOOKS): vol.All(
+            cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
+        ),
+        vol.Required(ATTR_PODCASTS): vol.All(
+            cv.ensure_list, [vol.Schema(MEDIA_ITEM_SCHEMA)]
+        ),
     },
 )

@@ -151,7 +159,11 @@ def queue_item_dict_from_mass_item(
         ATTR_QUEUE_ITEM_ID: item.queue_item_id,
         ATTR_NAME: item.name,
         ATTR_DURATION: item.duration,
-        ATTR_MEDIA_ITEM: media_item_dict_from_mass_item(mass, item.media_item),
+        ATTR_MEDIA_ITEM: (
+            media_item_dict_from_mass_item(mass, item.media_item)
+            if item.media_item
+            else None
+        ),
     }
     if streamdetails := item.streamdetails:
         base[ATTR_STREAM_TITLE] = streamdetails.stream_title

@@ -21,7 +21,10 @@ play_media:
           options:
             - artist
            - album
+            - audiobook
+            - folder
             - playlist
+            - podcast
             - track
             - radio
       artist:
@@ -118,7 +121,9 @@ search:
           options:
            - artist
             - album
+            - audiobook
             - playlist
+            - podcast
             - track
             - radio
       artist:
@@ -160,7 +165,9 @@ get_library:
           options:
             - artist
             - album
+            - audiobook
             - playlist
+            - podcast
             - track
             - radio
       favorite:

@@ -195,8 +195,11 @@
                 "options": {
                     "artist": "Artist",
                     "album": "Album",
+                    "audiobook": "Audiobook",
+                    "folder": "Folder",
                     "track": "Track",
                     "playlist": "Playlist",
+                    "podcast": "Podcast",
                     "radio": "Radio"
                 }
             },

@@ -7,5 +7,5 @@
     "documentation": "https://www.home-assistant.io/integrations/neato",
     "iot_class": "cloud_polling",
     "loggers": ["pybotvac"],
-    "requirements": ["pybotvac==0.0.25"]
+    "requirements": ["pybotvac==0.0.26"]
 }

@@ -20,7 +20,7 @@ from homeassistant.exceptions import PlatformNotReady
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
-from homeassistant.util import Throttle
+from homeassistant.util import Throttle, dt as dt_util

 _LOGGER = logging.getLogger(__name__)

@@ -119,6 +119,8 @@ class NSDepartureSensor(SensorEntity):
         self._time = time
         self._state = None
         self._trips = None
+        self._first_trip = None
+        self._next_trip = None

     @property
     def name(self):
@@ -133,44 +135,44 @@ class NSDepartureSensor(SensorEntity):
     @property
     def extra_state_attributes(self):
         """Return the state attributes."""
-        if not self._trips:
+        if not self._trips or self._first_trip is None:
             return None

-        if self._trips[0].trip_parts:
-            route = [self._trips[0].departure]
-            route.extend(k.destination for k in self._trips[0].trip_parts)
+        if self._first_trip.trip_parts:
+            route = [self._first_trip.departure]
+            route.extend(k.destination for k in self._first_trip.trip_parts)

         # Static attributes
         attributes = {
-            "going": self._trips[0].going,
+            "going": self._first_trip.going,
             "departure_time_planned": None,
             "departure_time_actual": None,
             "departure_delay": False,
-            "departure_platform_planned": self._trips[0].departure_platform_planned,
-            "departure_platform_actual": self._trips[0].departure_platform_actual,
+            "departure_platform_planned": self._first_trip.departure_platform_planned,
+            "departure_platform_actual": self._first_trip.departure_platform_actual,
             "arrival_time_planned": None,
             "arrival_time_actual": None,
             "arrival_delay": False,
-            "arrival_platform_planned": self._trips[0].arrival_platform_planned,
-            "arrival_platform_actual": self._trips[0].arrival_platform_actual,
+            "arrival_platform_planned": self._first_trip.arrival_platform_planned,
+            "arrival_platform_actual": self._first_trip.arrival_platform_actual,
             "next": None,
-            "status": self._trips[0].status.lower(),
-            "transfers": self._trips[0].nr_transfers,
+            "status": self._first_trip.status.lower(),
+            "transfers": self._first_trip.nr_transfers,
             "route": route,
             "remarks": None,
         }

         # Planned departure attributes
-        if self._trips[0].departure_time_planned is not None:
-            attributes["departure_time_planned"] = self._trips[
-                0
-            ].departure_time_planned.strftime("%H:%M")
+        if self._first_trip.departure_time_planned is not None:
+            attributes["departure_time_planned"] = (
+                self._first_trip.departure_time_planned.strftime("%H:%M")
+            )

         # Actual departure attributes
-        if self._trips[0].departure_time_actual is not None:
-            attributes["departure_time_actual"] = self._trips[
-                0
-            ].departure_time_actual.strftime("%H:%M")
+        if self._first_trip.departure_time_actual is not None:
+            attributes["departure_time_actual"] = (
+                self._first_trip.departure_time_actual.strftime("%H:%M")
+            )

         # Delay departure attributes
         if (
@@ -182,16 +184,16 @@ class NSDepartureSensor(SensorEntity):
             attributes["departure_delay"] = True

         # Planned arrival attributes
-        if self._trips[0].arrival_time_planned is not None:
-            attributes["arrival_time_planned"] = self._trips[
-                0
-            ].arrival_time_planned.strftime("%H:%M")
+        if self._first_trip.arrival_time_planned is not None:
+            attributes["arrival_time_planned"] = (
+                self._first_trip.arrival_time_planned.strftime("%H:%M")
+            )

         # Actual arrival attributes
-        if self._trips[0].arrival_time_actual is not None:
-            attributes["arrival_time_actual"] = self._trips[
-                0
-            ].arrival_time_actual.strftime("%H:%M")
+        if self._first_trip.arrival_time_actual is not None:
+            attributes["arrival_time_actual"] = (
+                self._first_trip.arrival_time_actual.strftime("%H:%M")
+            )

         # Delay arrival attributes
         if (
@@ -202,15 +204,14 @@ class NSDepartureSensor(SensorEntity):
             attributes["arrival_delay"] = True

         # Next attributes
-        if len(self._trips) > 1:
-            if self._trips[1].departure_time_actual is not None:
-                attributes["next"] = self._trips[1].departure_time_actual.strftime(
-                    "%H:%M"
-                )
-            elif self._trips[1].departure_time_planned is not None:
-                attributes["next"] = self._trips[1].departure_time_planned.strftime(
-                    "%H:%M"
-                )
+        if self._next_trip.departure_time_actual is not None:
+            attributes["next"] = self._next_trip.departure_time_actual.strftime("%H:%M")
+        elif self._next_trip.departure_time_planned is not None:
+            attributes["next"] = self._next_trip.departure_time_planned.strftime(
+                "%H:%M"
+            )
+        else:
+            attributes["next"] = None

         return attributes

@@ -225,6 +226,7 @@ class NSDepartureSensor(SensorEntity):
         ):
             self._state = None
             self._trips = None
+            self._first_trip = None
             return

         # Set the search parameter to search from a specific trip time
@@ -236,19 +238,51 @@ class NSDepartureSensor(SensorEntity):
                 .strftime("%d-%m-%Y %H:%M")
             )
         else:
-            trip_time = datetime.now().strftime("%d-%m-%Y %H:%M")
+            trip_time = dt_util.now().strftime("%d-%m-%Y %H:%M")

         try:
             self._trips = self._nsapi.get_trips(
                 trip_time, self._departure, self._via, self._heading, True, 0, 2
             )
             if self._trips:
-                if self._trips[0].departure_time_actual is None:
-                    planned_time = self._trips[0].departure_time_planned
-                    self._state = planned_time.strftime("%H:%M")
+                all_times = []
+
+                # If a train is delayed we can observe this through departure_time_actual.
+                for trip in self._trips:
+                    if trip.departure_time_actual is None:
+                        all_times.append(trip.departure_time_planned)
+                    else:
+                        all_times.append(trip.departure_time_actual)
+
+                # Remove all trains that already left.
+                filtered_times = [
+                    (i, time)
+                    for i, time in enumerate(all_times)
+                    if time > dt_util.now()
+                ]
+
+                if len(filtered_times) > 0:
+                    sorted_times = sorted(filtered_times, key=lambda x: x[1])
+                    self._first_trip = self._trips[sorted_times[0][0]]
+                    self._state = sorted_times[0][1].strftime("%H:%M")
+
+                    # Filter again to remove trains that leave at the exact same time.
+                    filtered_times = [
+                        (i, time)
+                        for i, time in enumerate(all_times)
+                        if time > sorted_times[0][1]
+                    ]
+
+                    if len(filtered_times) > 0:
+                        sorted_times = sorted(filtered_times, key=lambda x: x[1])
+                        self._next_trip = self._trips[sorted_times[0][0]]
+                    else:
+                        self._next_trip = None
+
                 else:
-                    actual_time = self._trips[0].departure_time_actual
-                    self._state = actual_time.strftime("%H:%M")
+                    self._first_trip = None
+                    self._state = None

         except (
             requests.exceptions.ConnectionError,
             requests.exceptions.HTTPError,

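The updated `update()` picks the first and second departure by effective time (the actual time when a delay is known, the planned time otherwise), skipping trains that already left and trains leaving at exactly the same minute as the first one. The selection logic on bare datetimes:

    from datetime import datetime, timedelta

    now = datetime(2025, 3, 1, 12, 0)
    all_times = [
        now - timedelta(minutes=5),  # already departed
        now + timedelta(minutes=3),
        now + timedelta(minutes=3),  # leaves at the same time as the one above
    ]

    upcoming = sorted(
        ((i, t) for i, t in enumerate(all_times) if t > now), key=lambda x: x[1]
    )
    first_index, first_time = upcoming[0]  # trip whose time becomes the state
    later = [(i, t) for i, t in enumerate(all_times) if t > first_time]
    next_index = later[0][0] if later else None

    assert first_index == 1
    assert next_index is None  # the same-minute duplicate is filtered out
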
@@ -19,5 +19,5 @@
     "documentation": "https://www.home-assistant.io/integrations/nest",
     "iot_class": "cloud_push",
     "loggers": ["google_nest_sdm"],
-    "requirements": ["google-nest-sdm==7.1.3"]
+    "requirements": ["google-nest-sdm==7.1.4"]
 }

@@ -58,6 +58,7 @@
     },
     "abort": {
         "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+        "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
         "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]",
         "missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
         "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",

@@ -12,5 +12,5 @@
     "documentation": "https://www.home-assistant.io/integrations/nexia",
     "iot_class": "cloud_polling",
     "loggers": ["nexia"],
-    "requirements": ["nexia==2.0.9"]
+    "requirements": ["nexia==2.2.1"]
 }

@@ -41,14 +41,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool:
|
||||
"""Set up OneDrive from a config entry."""
|
||||
implementation = await async_get_config_entry_implementation(hass, entry)
|
||||
session = OAuth2Session(hass, entry, implementation)
|
||||
|
||||
async def get_access_token() -> str:
|
||||
await session.async_ensure_token_valid()
|
||||
return cast(str, session.token[CONF_ACCESS_TOKEN])
|
||||
|
||||
client = OneDriveClient(get_access_token, async_get_clientsession(hass))
|
||||
client, get_access_token = await _get_onedrive_client(hass, entry)
|
||||
|
||||
# get approot, will be created automatically if it does not exist
|
||||
approot = await _handle_item_operation(client.get_approot, "approot")
|
||||
@@ -164,20 +157,47 @@ async def async_migrate_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -
     _LOGGER.debug(
         "Migrating OneDrive config entry from version %s.%s", version, minor_version
     )

+    client, _ = await _get_onedrive_client(hass, entry)
     instance_id = await async_get_instance_id(hass)
+    try:
+        approot = await client.get_approot()
+        folder = await client.get_drive_item(
+            f"{approot.id}:/backups_{instance_id[:8]}:"
+        )
+    except OneDriveException:
+        _LOGGER.exception("Migration to version 1.2 failed")
+        return False

     hass.config_entries.async_update_entry(
         entry,
         data={
             **entry.data,
-            CONF_FOLDER_ID: "id",  # will be updated during setup_entry
+            CONF_FOLDER_ID: folder.id,
             CONF_FOLDER_NAME: f"backups_{instance_id[:8]}",
         },
         minor_version=2,
     )
     _LOGGER.debug("Migration to version 1.2 successful")
     return True


+async def _get_onedrive_client(
+    hass: HomeAssistant, entry: OneDriveConfigEntry
+) -> tuple[OneDriveClient, Callable[[], Awaitable[str]]]:
+    """Get OneDrive client."""
+    implementation = await async_get_config_entry_implementation(hass, entry)
+    session = OAuth2Session(hass, entry, implementation)
+
+    async def get_access_token() -> str:
+        await session.async_ensure_token_valid()
+        return cast(str, session.token[CONF_ACCESS_TOKEN])
+
+    return (
+        OneDriveClient(get_access_token, async_get_clientsession(hass)),
+        get_access_token,
+    )


 async def _handle_item_operation(
     func: Callable[[], Awaitable[Item]], folder: str
 ) -> Item:
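The helper introduced above returns the client together with the token callback it was built around, so callers such as the migration step reuse one refresh path. A minimal, framework-free sketch of that closure pattern (FakeSession, make_client and the dict client are hypothetical stand-ins, not the integration's API):

import asyncio
from collections.abc import Awaitable, Callable

class FakeSession:
    """Stand-in for an OAuth2 session that refreshes tokens on demand."""

    def __init__(self) -> None:
        self.token = {"access_token": "initial"}

    async def ensure_token_valid(self) -> None:
        # A real session would refresh the token here if it had expired.
        self.token["access_token"] = "refreshed"

def make_client(session: FakeSession) -> tuple[dict, Callable[[], Awaitable[str]]]:
    """Return a client plus the token callback it was built around."""

    async def get_access_token() -> str:
        await session.ensure_token_valid()
        return session.token["access_token"]

    client = {"token_provider": get_access_token}  # placeholder client object
    return client, get_access_token

async def main() -> None:
    client, get_token = make_client(FakeSession())
    # The client and outside callers share the same refresh path.
    print(await get_token())

asyncio.run(main())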
homeassistant/components/onedrive/diagnostics.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+"""Diagnostics support for OneDrive."""
+
+from __future__ import annotations
+
+from dataclasses import asdict
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
+from homeassistant.core import HomeAssistant
+
+from .coordinator import OneDriveConfigEntry
+
+TO_REDACT = {"display_name", "email", CONF_ACCESS_TOKEN, CONF_TOKEN}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant,
+    entry: OneDriveConfigEntry,
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+
+    coordinator = entry.runtime_data.coordinator
+
+    data = {
+        "drive": asdict(coordinator.data),
+        "config": {
+            **entry.data,
+            **entry.options,
+        },
+    }
+
+    return async_redact_data(data, TO_REDACT)
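async_redact_data masks every key listed in TO_REDACT while leaving the payload's shape intact. A simplified, hypothetical re-implementation to illustrate the idea (the real helper in homeassistant.components.diagnostics handles more cases):

from typing import Any

REDACTED = "**REDACTED**"

def redact(data: Any, to_redact: set[str]) -> Any:
    """Recursively mask values whose keys are in to_redact."""
    if isinstance(data, dict):
        return {
            key: REDACTED if key in to_redact else redact(value, to_redact)
            for key, value in data.items()
        }
    if isinstance(data, list):
        return [redact(item, to_redact) for item in data]
    return data

payload = {"drive": {"owner": {"email": "a@b.c", "id": 1}}, "config": {"token": "x"}}
print(redact(payload, {"email", "token"}))
# {'drive': {'owner': {'email': '**REDACTED**', 'id': 1}}, 'config': {'token': '**REDACTED**'}}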
@@ -9,5 +9,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["onedrive_personal_sdk"],
   "quality_scale": "platinum",
-  "requirements": ["onedrive-personal-sdk==0.0.11"]
+  "requirements": ["onedrive-personal-sdk==0.0.13"]
 }
@@ -41,10 +41,7 @@ rules:

   # Gold
   devices: done
-  diagnostics:
-    status: exempt
-    comment: |
-      There is no data to diagnose.
+  diagnostics: done
   discovery-update-info:
     status: exempt
     comment: |
@@ -103,7 +103,7 @@ class OneDriveDriveStateSensor(
         self._attr_unique_id = f"{coordinator.data.id}_{description.key}"
         self._attr_device_info = DeviceInfo(
             entry_type=DeviceEntryType.SERVICE,
-            name=coordinator.data.name,
+            name=coordinator.data.name or coordinator.config_entry.title,
             identifiers={(DOMAIN, coordinator.data.id)},
             manufacturer="Microsoft",
             model=f"OneDrive {coordinator.data.drive_type.value.capitalize()}",
@@ -398,6 +398,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
         self._volume_resolution = volume_resolution
         self._max_volume = max_volume

+        self._options_sources = sources
         self._source_lib_mapping = _input_source_lib_mappings(zone)
         self._rev_source_lib_mapping = _rev_input_source_lib_mappings(zone)
         self._source_mapping = {
@@ -409,6 +410,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
             value: key for key, value in self._source_mapping.items()
         }

+        self._options_sound_modes = sound_modes
         self._sound_mode_lib_mapping = _listening_mode_lib_mappings(zone)
         self._rev_sound_mode_lib_mapping = _rev_listening_mode_lib_mappings(zone)
         self._sound_mode_mapping = {
@@ -623,11 +625,20 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
             return

         source_meaning = source.value_meaning
-        _LOGGER.error(
-            'Input source "%s" is invalid for entity: %s',
-            source_meaning,
-            self.entity_id,
-        )
+        if source not in self._options_sources:
+            _LOGGER.warning(
+                'Input source "%s" for entity: %s is not in the list. Check integration options',
+                source_meaning,
+                self.entity_id,
+            )
+        else:
+            _LOGGER.error(
+                'Input source "%s" is invalid for entity: %s',
+                source_meaning,
+                self.entity_id,
+            )

         self._attr_source = source_meaning

     @callback
@@ -638,11 +649,20 @@ class OnkyoMediaPlayer(MediaPlayerEntity):
             return

         sound_mode_meaning = sound_mode.value_meaning
-        _LOGGER.error(
-            'Listening mode "%s" is invalid for entity: %s',
-            sound_mode_meaning,
-            self.entity_id,
-        )
+        if sound_mode not in self._options_sound_modes:
+            _LOGGER.warning(
+                'Listening mode "%s" for entity: %s is not in the list. Check integration options',
+                sound_mode_meaning,
+                self.entity_id,
+            )
+        else:
+            _LOGGER.error(
+                'Listening mode "%s" is invalid for entity: %s',
+                sound_mode_meaning,
+                self.entity_id,
+            )

         self._attr_sound_mode = sound_mode_meaning

     @callback
@@ -149,9 +149,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     commit_interval = conf[CONF_COMMIT_INTERVAL]
     db_max_retries = conf[CONF_DB_MAX_RETRIES]
     db_retry_wait = conf[CONF_DB_RETRY_WAIT]
-    db_url = conf.get(CONF_DB_URL) or DEFAULT_URL.format(
-        hass_config_path=hass.config.path(DEFAULT_DB_FILE)
-    )
+    db_url = conf.get(CONF_DB_URL) or get_default_url(hass)
     exclude = conf[CONF_EXCLUDE]
     exclude_event_types: set[EventType[Any] | str] = set(
         exclude.get(CONF_EVENT_TYPES, [])
@@ -200,3 +198,8 @@
         instance.queue_task(AddRecorderPlatformTask(domain, platform))

     await async_process_integration_platforms(hass, DOMAIN, _process_recorder_platform)
+
+
+def get_default_url(hass: HomeAssistant) -> str:
+    """Return the default URL."""
+    return DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE))
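On a stock installation the default URL resolves to an SQLite file inside the config directory. A sketch of what the template expansion produces, assuming the usual constant values (check the recorder's const.py for the authoritative ones):

# Assumed values of the recorder constants; illustrative only.
DEFAULT_URL = "sqlite:///{hass_config_path}"
DEFAULT_DB_FILE = "home-assistant_v2.db"

# hass.config.path() joins against the config directory, e.g. /config.
config_path = "/config/" + DEFAULT_DB_FILE
print(DEFAULT_URL.format(hass_config_path=config_path))
# sqlite:////config/home-assistant_v2.db  (four slashes: scheme plus absolute path)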
@@ -2,7 +2,7 @@

 from logging import getLogger

-from homeassistant.core import HomeAssistant
+from homeassistant.core import CoreState, HomeAssistant
 from homeassistant.exceptions import HomeAssistantError

 from .util import async_migration_in_progress, get_instance
@@ -14,6 +14,8 @@ async def async_pre_backup(hass: HomeAssistant) -> None:
     """Perform operations before a backup starts."""
     _LOGGER.info("Backup start notification, locking database for writes")
     instance = get_instance(hass)
+    if hass.state is not CoreState.running:
+        raise HomeAssistantError("Home Assistant is not running")
     if async_migration_in_progress(hass):
         raise HomeAssistantError("Database migration in progress")
     await instance.lock_database()
@@ -10,6 +10,7 @@ from homeassistant.components import websocket_api
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import recorder as recorder_helper

+from . import get_default_url
 from .util import get_instance
@@ -34,6 +35,7 @@ async def ws_info(
     await hass.data[recorder_helper.DATA_RECORDER].db_connected
     instance = get_instance(hass)
     backlog = instance.backlog
+    db_in_default_location = instance.db_url == get_default_url(hass)
     migration_in_progress = instance.migration_in_progress
     migration_is_live = instance.migration_is_live
     recording = instance.recording
@@ -44,6 +46,7 @@

     recorder_info = {
         "backlog": backlog,
+        "db_in_default_location": db_in_default_location,
         "max_backlog": max_backlog,
         "migration_in_progress": migration_in_progress,
         "migration_is_live": migration_is_live,
@@ -30,6 +30,12 @@ CONF_DB_INTEGRITY_CHECK = "db_integrity_check"
 MAX_QUEUE_BACKLOG_MIN_VALUE = 65000
 MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG = 256 * 1024**2

+# As soon as we have more than 999 ids, split the query as the
+# MySQL optimizer handles it poorly and will no longer
+# do an index only scan with a group-by
+# https://github.com/home-assistant/core/issues/132865#issuecomment-2543160459
+MAX_IDS_FOR_INDEXED_GROUP_BY = 999
+
 # The maximum number of rows (events) we purge in one delete statement

 DEFAULT_MAX_BIND_VARS = 4000
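This constant is consumed through chunked_or_all, which hands back the whole collection when it fits and fixed-size chunks otherwise, so the common case stays a single query. A hypothetical stand-in showing the behavior the recorder relies on:

from collections.abc import Collection, Iterable
from itertools import islice

def chunked_or_all(values: Collection[int], chunk_size: int) -> Iterable[Collection[int]]:
    """Yield values whole if small enough, else in chunk_size pieces."""
    if len(values) <= chunk_size:
        # Common case: one query covers every id.
        yield values
        return
    iterator = iter(values)
    while chunk := list(islice(iterator, chunk_size)):
        yield chunk

ids = list(range(2500))
sizes = [len(chunk) for chunk in chunked_or_all(ids, 999)]
print(sizes)  # [999, 999, 502] -> three indexed group-by queries instead of one slow one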
@@ -6,11 +6,12 @@ from collections.abc import Callable, Iterable, Iterator
 from datetime import datetime
 from itertools import groupby
 from operator import itemgetter
-from typing import Any, cast
+from typing import TYPE_CHECKING, Any, cast

 from sqlalchemy import (
     CompoundSelect,
     Select,
     StatementLambdaElement,
     Subquery,
     and_,
     func,
@@ -26,8 +27,9 @@ from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_
 from homeassistant.core import HomeAssistant, State, split_entity_id
 from homeassistant.helpers.recorder import get_instance
 from homeassistant.util import dt as dt_util
+from homeassistant.util.collection import chunked_or_all

-from ..const import LAST_REPORTED_SCHEMA_VERSION
+from ..const import LAST_REPORTED_SCHEMA_VERSION, MAX_IDS_FOR_INDEXED_GROUP_BY
 from ..db_schema import (
     SHARED_ATTR_OR_LEGACY_ATTRIBUTES,
     StateAttributes,
@@ -149,6 +151,7 @@ def _significant_states_stmt(
     no_attributes: bool,
     include_start_time_state: bool,
     run_start_ts: float | None,
+    slow_dependent_subquery: bool,
 ) -> Select | CompoundSelect:
     """Query the database for significant state changes."""
     include_last_changed = not significant_changes_only
@@ -187,6 +190,7 @@
                 metadata_ids,
                 no_attributes,
                 include_last_changed,
+                slow_dependent_subquery,
             ).subquery(),
             no_attributes,
             include_last_changed,
@@ -257,7 +261,68 @@ get_significant_states_with_session(
     start_time_ts = start_time.timestamp()
     end_time_ts = datetime_to_timestamp_or_none(end_time)
     single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None
-    stmt = lambda_stmt(
+    rows: list[Row] = []
+    if TYPE_CHECKING:
+        assert instance.database_engine is not None
+    slow_dependent_subquery = instance.database_engine.optimizer.slow_dependent_subquery
+    if include_start_time_state and slow_dependent_subquery:
+        # https://github.com/home-assistant/core/issues/137178
+        # If we include the start time state we need to limit the
+        # number of metadata_ids we query for at a time to avoid
+        # hitting limits in the MySQL optimizer that prevent
+        # the start time state query from using an index-only optimization
+        # to find the start time state.
+        iter_metadata_ids = chunked_or_all(metadata_ids, MAX_IDS_FOR_INDEXED_GROUP_BY)
+    else:
+        iter_metadata_ids = (metadata_ids,)
+    for metadata_ids_chunk in iter_metadata_ids:
+        stmt = _generate_significant_states_with_session_stmt(
+            start_time_ts,
+            end_time_ts,
+            single_metadata_id,
+            metadata_ids_chunk,
+            metadata_ids_in_significant_domains,
+            significant_changes_only,
+            no_attributes,
+            include_start_time_state,
+            oldest_ts,
+            slow_dependent_subquery,
+        )
+        row_chunk = cast(
+            list[Row],
+            execute_stmt_lambda_element(session, stmt, None, end_time, orm_rows=False),
+        )
+        if rows:
+            rows += row_chunk
+        else:
+            # If we have no rows yet, we can just assign the chunk,
+            # as this is the common case since it's rare that
+            # we exceed the MAX_IDS_FOR_INDEXED_GROUP_BY limit.
+            rows = row_chunk
+    return _sorted_states_to_dict(
+        rows,
+        start_time_ts if include_start_time_state else None,
+        entity_ids,
+        entity_id_to_metadata_id,
+        minimal_response,
+        compressed_state_format,
+        no_attributes=no_attributes,
+    )
+
+
+def _generate_significant_states_with_session_stmt(
+    start_time_ts: float,
+    end_time_ts: float | None,
+    single_metadata_id: int | None,
+    metadata_ids: list[int],
+    metadata_ids_in_significant_domains: list[int],
+    significant_changes_only: bool,
+    no_attributes: bool,
+    include_start_time_state: bool,
+    oldest_ts: float | None,
+    slow_dependent_subquery: bool,
+) -> StatementLambdaElement:
+    return lambda_stmt(
         lambda: _significant_states_stmt(
             start_time_ts,
             end_time_ts,
@@ -268,6 +333,7 @@
             no_attributes,
             include_start_time_state,
             oldest_ts,
+            slow_dependent_subquery,
         ),
         track_on=[
             bool(single_metadata_id),
@@ -276,17 +342,9 @@
             significant_changes_only,
             no_attributes,
             include_start_time_state,
+            slow_dependent_subquery,
         ],
     )
-    return _sorted_states_to_dict(
-        execute_stmt_lambda_element(session, stmt, None, end_time, orm_rows=False),
-        start_time_ts if include_start_time_state else None,
-        entity_ids,
-        entity_id_to_metadata_id,
-        minimal_response,
-        compressed_state_format,
-        no_attributes=no_attributes,
-    )


 def get_full_significant_states_with_session(
@@ -554,13 +612,14 @@
     )


-def _get_start_time_state_for_entities_stmt(
+def _get_start_time_state_for_entities_stmt_dependent_sub_query(
     epoch_time: float,
     metadata_ids: list[int],
     no_attributes: bool,
     include_last_changed: bool,
 ) -> Select:
     """Baked query to get states for specific entities."""
+    # Engine has a fast dependent subquery optimizer
     # This query is the result of significant research in
     # https://github.com/home-assistant/core/issues/132865
     # A reverse index scan with a limit 1 is the fastest way to get the
@@ -570,7 +629,9 @@ def _get_start_time_state_for_entities_stmt(
     # before a specific point in time for all entities.
     stmt = (
         _stmt_and_join_attributes_for_start_state(
-            no_attributes, include_last_changed, False
+            no_attributes=no_attributes,
+            include_last_changed=include_last_changed,
+            include_last_reported=False,
         )
         .select_from(StatesMeta)
         .join(
@@ -600,6 +661,55 @@
     )


+def _get_start_time_state_for_entities_stmt_group_by(
+    epoch_time: float,
+    metadata_ids: list[int],
+    no_attributes: bool,
+    include_last_changed: bool,
+) -> Select:
+    """Baked query to get states for specific entities."""
+    # Simple group-by for MySQL, must use less
+    # than 1000 metadata_ids in the IN clause for MySQL
+    # or it will optimize poorly. Callers are responsible
+    # for ensuring that the number of metadata_ids is less
+    # than 1000.
+    most_recent_states_for_entities_by_date = (
+        select(
+            States.metadata_id.label("max_metadata_id"),
+            func.max(States.last_updated_ts).label("max_last_updated"),
+        )
+        .filter(
+            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
+        )
+        .group_by(States.metadata_id)
+        .subquery()
+    )
+    stmt = (
+        _stmt_and_join_attributes_for_start_state(
+            no_attributes=no_attributes,
+            include_last_changed=include_last_changed,
+            include_last_reported=False,
+        )
+        .join(
+            most_recent_states_for_entities_by_date,
+            and_(
+                States.metadata_id
+                == most_recent_states_for_entities_by_date.c.max_metadata_id,
+                States.last_updated_ts
+                == most_recent_states_for_entities_by_date.c.max_last_updated,
+            ),
+        )
+        .filter(
+            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
+        )
+    )
+    if no_attributes:
+        return stmt
+    return stmt.outerjoin(
+        StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
+    )


 def _get_oldest_possible_ts(
     hass: HomeAssistant, utc_point_in_time: datetime
 ) -> float | None:
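The group-by variant above is the classic latest-row-per-group pattern: a subquery aggregates the maximum timestamp per id, and a join back onto the table recovers the full rows. A self-contained SQLAlchemy sketch against a hypothetical table (not the recorder schema):

from sqlalchemy import Column, Float, Integer, and_, create_engine, func, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Reading(Base):  # hypothetical stand-in for the States table
    __tablename__ = "readings"
    id = Column(Integer, primary_key=True)
    sensor_id = Column(Integer, index=True)
    updated_ts = Column(Float, index=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(
        Reading(sensor_id=s, updated_ts=ts)
        for s, ts in [(1, 10.0), (1, 20.0), (2, 5.0), (2, 7.0)]
    )
    session.commit()

    # Subquery: newest timestamp per sensor before the cutoff.
    latest = (
        select(
            Reading.sensor_id.label("max_sensor_id"),
            func.max(Reading.updated_ts).label("max_updated_ts"),
        )
        .filter(Reading.updated_ts < 15.0)
        .group_by(Reading.sensor_id)
        .subquery()
    )
    # Join back to recover the full row for each (sensor, max timestamp) pair.
    stmt = select(Reading).join(
        latest,
        and_(
            Reading.sensor_id == latest.c.max_sensor_id,
            Reading.updated_ts == latest.c.max_updated_ts,
        ),
    )
    for row in session.scalars(stmt):
        print(row.sensor_id, row.updated_ts)  # (1, 10.0) and (2, 7.0)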
@@ -620,6 +730,7 @@ def _get_start_time_state_stmt(
     metadata_ids: list[int],
     no_attributes: bool,
     include_last_changed: bool,
+    slow_dependent_subquery: bool,
 ) -> Select:
     """Return the states at a specific point in time."""
     if single_metadata_id:
@@ -634,7 +745,15 @@
         )
     # We have more than one entity to look at so we need to do a query on states
     # since the last recorder run started.
-    return _get_start_time_state_for_entities_stmt(
+    if slow_dependent_subquery:
+        return _get_start_time_state_for_entities_stmt_group_by(
+            epoch_time,
+            metadata_ids,
+            no_attributes,
+            include_last_changed,
+        )
+
+    return _get_start_time_state_for_entities_stmt_dependent_sub_query(
         epoch_time,
         metadata_ids,
         no_attributes,
@@ -37,3 +37,13 @@ class DatabaseOptimizer:
     # https://wiki.postgresql.org/wiki/Loose_indexscan
     # https://github.com/home-assistant/core/issues/126084
     slow_range_in_select: bool
+
+    # MySQL 8.x+ can end up with a file-sort on a dependent subquery
+    # which makes the query painfully slow.
+    # https://github.com/home-assistant/core/issues/137178
+    # The solution is to use multiple indexed group-by queries instead
+    # of the subquery as long as the group by does not exceed
+    # 999 elements since as soon as we hit 1000 elements MySQL
+    # will no longer use the group_index_range optimization.
+    # https://github.com/home-assistant/core/issues/132865#issuecomment-2543160459
+    slow_dependent_subquery: bool
@@ -28,6 +28,7 @@ from homeassistant.helpers.recorder import DATA_RECORDER
 from homeassistant.helpers.singleton import singleton
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util
+from homeassistant.util.collection import chunked_or_all
 from homeassistant.util.unit_conversion import (
     AreaConverter,
     BaseUnitConverter,
@@ -59,6 +60,7 @@ from .const import (
     INTEGRATION_PLATFORM_LIST_STATISTIC_IDS,
     INTEGRATION_PLATFORM_UPDATE_STATISTICS_ISSUES,
     INTEGRATION_PLATFORM_VALIDATE_STATISTICS,
+    MAX_IDS_FOR_INDEXED_GROUP_BY,
     SupportedDialect,
 )
 from .db_schema import (
@@ -1669,6 +1671,7 @@ def _augment_result_with_change(
     drop_sum = "sum" not in _types
     prev_sums = {}
     if tmp := _statistics_at_time(
+        get_instance(hass),
         session,
         {metadata[statistic_id][0] for statistic_id in result},
         table,
@@ -2027,7 +2030,39 @@
     )


-def _generate_statistics_at_time_stmt(
+def _generate_statistics_at_time_stmt_group_by(
+    table: type[StatisticsBase],
+    metadata_ids: set[int],
+    start_time_ts: float,
+    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+) -> StatementLambdaElement:
+    """Create the statement for finding the statistics for a given time."""
+    # Simple group-by for MySQL, must use less
+    # than 1000 metadata_ids in the IN clause for MySQL
+    # or it will optimize poorly. Callers are responsible
+    # for ensuring that the number of metadata_ids is less
+    # than 1000.
+    return _generate_select_columns_for_types_stmt(table, types) + (
+        lambda q: q.join(
+            most_recent_statistic_ids := (
+                select(
+                    func.max(table.start_ts).label("max_start_ts"),
+                    table.metadata_id.label("max_metadata_id"),
+                )
+                .filter(table.start_ts < start_time_ts)
+                .filter(table.metadata_id.in_(metadata_ids))
+                .group_by(table.metadata_id)
+                .subquery()
+            ),
+            and_(
+                table.start_ts == most_recent_statistic_ids.c.max_start_ts,
+                table.metadata_id == most_recent_statistic_ids.c.max_metadata_id,
+            ),
+        )
+    )
+
+
+def _generate_statistics_at_time_stmt_dependent_sub_query(
     table: type[StatisticsBase],
     metadata_ids: set[int],
     start_time_ts: float,
@@ -2041,8 +2076,7 @@ def _generate_statistics_at_time_stmt(
     # databases. Since all databases support this query as a join
     # condition we can use it as a subquery to get the last start_time_ts
     # before a specific point in time for all entities.
-    stmt = _generate_select_columns_for_types_stmt(table, types)
-    stmt += (
+    return _generate_select_columns_for_types_stmt(table, types) + (
         lambda q: q.select_from(StatisticsMeta)
         .join(
             table,
@@ -2064,10 +2098,10 @@
         )
         .where(table.metadata_id.in_(metadata_ids))
     )
-    return stmt


 def _statistics_at_time(
+    instance: Recorder,
     session: Session,
     metadata_ids: set[int],
     table: type[StatisticsBase],
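Both builders compose their statements with SQLAlchemy's lambda_stmt, where closures added via + become part of the statement's cache key and only bound values change between calls. A minimal sketch with a hypothetical table:

from sqlalchemy import Column, Integer, String, create_engine, lambda_stmt, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Item(Base):  # hypothetical table for illustration
    __tablename__ = "items"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

def items_named(name: str):
    # Base statement plus a composed-on filter; SQLAlchemy caches the
    # statement construction and only the bound value changes per call.
    stmt = lambda_stmt(lambda: select(Item))
    stmt += lambda s: s.where(Item.name == name)
    return stmt

with Session(engine) as session:
    session.add(Item(name="a"))
    session.commit()
    print([item.name for item in session.scalars(items_named("a"))])  # ['a']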
@@ -2076,8 +2110,41 @@
 ) -> Sequence[Row] | None:
     """Return last known statistics, earlier than start_time, for the metadata_ids."""
     start_time_ts = start_time.timestamp()
-    stmt = _generate_statistics_at_time_stmt(table, metadata_ids, start_time_ts, types)
-    return cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
+    if TYPE_CHECKING:
+        assert instance.database_engine is not None
+    if not instance.database_engine.optimizer.slow_dependent_subquery:
+        stmt = _generate_statistics_at_time_stmt_dependent_sub_query(
+            table=table,
+            metadata_ids=metadata_ids,
+            start_time_ts=start_time_ts,
+            types=types,
+        )
+        return cast(list[Row], execute_stmt_lambda_element(session, stmt))
+    rows: list[Row] = []
+    # https://github.com/home-assistant/core/issues/132865
+    # If we include the start time state we need to limit the
+    # number of metadata_ids we query for at a time to avoid
+    # hitting limits in the MySQL optimizer that prevent
+    # the start time state query from using an index-only optimization
+    # to find the start time state.
+    for metadata_ids_chunk in chunked_or_all(
+        metadata_ids, MAX_IDS_FOR_INDEXED_GROUP_BY
+    ):
+        stmt = _generate_statistics_at_time_stmt_group_by(
+            table=table,
+            metadata_ids=metadata_ids_chunk,
+            start_time_ts=start_time_ts,
+            types=types,
+        )
+        row_chunk = cast(list[Row], execute_stmt_lambda_element(session, stmt))
+        if rows:
+            rows += row_chunk
+        else:
+            # If we have no rows yet, we can just assign the chunk,
+            # as this is the common case since it's rare that
+            # we exceed the MAX_IDS_FOR_INDEXED_GROUP_BY limit.
+            rows = row_chunk
+    return rows


 def _build_sum_converted_stats(
@@ -464,6 +464,7 @@ def setup_connection_for_dialect(
     """Execute statements needed for dialect connection."""
     version: AwesomeVersion | None = None
     slow_range_in_select = False
+    slow_dependent_subquery = False
    if dialect_name == SupportedDialect.SQLITE:
         if first_connection:
             old_isolation = dbapi_connection.isolation_level  # type: ignore[attr-defined]
@@ -505,9 +506,8 @@
         result = query_on_connection(dbapi_connection, "SELECT VERSION()")
         version_string = result[0][0]
         version = _extract_version_from_server_response(version_string)
-        is_maria_db = "mariadb" in version_string.lower()

-        if is_maria_db:
+        if "mariadb" in version_string.lower():
             if not version or version < MIN_VERSION_MARIA_DB:
                 _raise_if_version_unsupported(
                     version or version_string, "MariaDB", MIN_VERSION_MARIA_DB
@@ -523,19 +523,21 @@
                     instance.hass,
                     version,
                 )
+            slow_range_in_select = bool(
+                not version
+                or version < MARIADB_WITH_FIXED_IN_QUERIES_105
+                or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
+                or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
+                or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
+            )
         elif not version or version < MIN_VERSION_MYSQL:
             _raise_if_version_unsupported(
                 version or version_string, "MySQL", MIN_VERSION_MYSQL
             )
-
-        slow_range_in_select = bool(
-            not version
-            or version < MARIADB_WITH_FIXED_IN_QUERIES_105
-            or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
-            or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
-            or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
-        )
+        else:
+            # MySQL
+            # https://github.com/home-assistant/core/issues/137178
+            slow_dependent_subquery = True

         # Ensure all times are using UTC to avoid issues with daylight savings
         execute_on_connection(dbapi_connection, "SET time_zone = '+00:00'")
@@ -565,7 +567,10 @@
     return DatabaseEngine(
         dialect=SupportedDialect(dialect_name),
         version=version,
-        optimizer=DatabaseOptimizer(slow_range_in_select=slow_range_in_select),
+        optimizer=DatabaseOptimizer(
+            slow_range_in_select=slow_range_in_select,
+            slow_dependent_subquery=slow_dependent_subquery,
+        ),
         max_bind_vars=DEFAULT_MAX_BIND_VARS,
     )
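The MariaDB branch above gates slow_range_in_select on ordered version-window comparisons. A hedged sketch of that window check using the awesomeversion package; the bound values here are illustrative, not copied from the recorder's constants:

from awesomeversion import AwesomeVersion

# Illustrative window bounds; see the recorder's const.py for the real ones.
MARIA_DB_106 = AwesomeVersion("10.6.0")
FIXED_105 = AwesomeVersion("10.5.17")
FIXED_106 = AwesomeVersion("10.6.9")

def slow_range_in_select(version: AwesomeVersion | None) -> bool:
    """True when the server falls inside a known-slow version window."""
    return bool(
        not version
        or version < FIXED_105
        or MARIA_DB_106 <= version < FIXED_106
    )

print(slow_range_in_select(AwesomeVersion("10.5.10")))  # True: before the 10.5 fix
print(slow_range_in_select(AwesomeVersion("10.5.20")))  # False: fixed 10.5 release
print(slow_range_in_select(AwesomeVersion("10.6.5")))   # True: inside the unfixed 10.6 window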
@@ -19,5 +19,5 @@
   "iot_class": "local_push",
   "loggers": ["reolink_aio"],
   "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.12.0"]
+  "requirements": ["reolink-aio==0.12.1"]
 }
Some files were not shown because too many files have changed in this diff.