mirror of https://github.com/home-assistant/core.git
synced 2025-11-03 07:59:30 +00:00

Compare commits — 1 commit: dev ... 2025.11.0b

| Author | SHA1 | Date |
|---|---|---|
|  | 95ac5c0183 |  |

.github/workflows/builder.yml (vendored) — 46 changed lines
@@ -162,6 +162,18 @@ jobs:
            sed -i "s|home-assistant-intents==.*||" requirements_all.txt
          fi

+      - name: Adjustments for armhf
+        if: matrix.arch == 'armhf'
+        run: |
+          # Pandas has issues building on armhf, it is expected they
+          # will drop the platform in the near future (they consider it
+          # "flimsy" on 386). The following packages depend on pandas,
+          # so we comment them out.
+          sed -i "s|env-canada|# env-canada|g" requirements_all.txt
+          sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
+          sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
+          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
+
       - name: Download translations
         uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
         with:
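For clarity (a sketch, not part of the workflow): each sed call above rewrites a requirements line so the package is commented out and pip skips it. The version number below is hypothetical.

# What s|env-canada|# env-canada|g does to a requirements_all.txt line:
line = "env-canada==0.11.0"  # hypothetical pinned version
commented = line.replace("env-canada", "# env-canada")
assert commented == "# env-canada==0.11.0"
print(commented)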
@@ -214,11 +226,19 @@ jobs:
           - odroid-c4
           - odroid-m1
           - odroid-n2
+          - odroid-xu
+          - qemuarm
           - qemuarm-64
+          - qemux86
           - qemux86-64
+          - raspberrypi
+          - raspberrypi2
+          - raspberrypi3
           - raspberrypi3-64
+          - raspberrypi4
           - raspberrypi4-64
           - raspberrypi5-64
+          - tinker
           - yellow
           - green
     steps:
@@ -277,7 +297,6 @@ jobs:
          key-description: "Home Assistant Core"
          version: ${{ needs.init.outputs.version }}
          channel: ${{ needs.init.outputs.channel }}
          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

      - name: Update version file (stable -> beta)
        if: needs.init.outputs.channel == 'stable'
@@ -287,7 +306,6 @@ jobs:
          key-description: "Home Assistant Core"
          version: ${{ needs.init.outputs.version }}
          channel: beta
          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

  publish_container:
    name: Publish meta container for ${{ matrix.registry }}
@@ -339,12 +357,27 @@ jobs:

          docker manifest create "${registry}/home-assistant:${tag_l}" \
            "${registry}/amd64-homeassistant:${tag_r}" \
+            "${registry}/i386-homeassistant:${tag_r}" \
+            "${registry}/armhf-homeassistant:${tag_r}" \
+            "${registry}/armv7-homeassistant:${tag_r}" \
            "${registry}/aarch64-homeassistant:${tag_r}"

          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
            "${registry}/amd64-homeassistant:${tag_r}" \
            --os linux --arch amd64
+
+          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
+            "${registry}/i386-homeassistant:${tag_r}" \
+            --os linux --arch 386
+
+          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
+            "${registry}/armhf-homeassistant:${tag_r}" \
+            --os linux --arch arm --variant=v6
+
+          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
+            "${registry}/armv7-homeassistant:${tag_r}" \
+            --os linux --arch arm --variant=v7

          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
            "${registry}/aarch64-homeassistant:${tag_r}" \
            --os linux --arch arm64 --variant=v8
@@ -372,14 +405,23 @@ jobs:

          # Pull images from github container registry and verify signature
          docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
+          docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
+          docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
+          docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
          docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

          validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
+          validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
+          validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
+          validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
          validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

          if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
            # Upload images to dockerhub
            push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
+            push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
+            push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
+            push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
            push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
          fi
.github/workflows/ci.yaml (vendored) — 8 changed lines
@@ -40,7 +40,7 @@ env:
  CACHE_VERSION: 1
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.12"
+  HA_SHORT_VERSION: "2025.11"
  DEFAULT_PYTHON: "3.13"
  ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
  # 10.3 is the oldest supported version
@@ -502,6 +502,7 @@ jobs:
            libavfilter-dev \
            libavformat-dev \
            libavutil-dev \
+            libgammu-dev \
            libswresample-dev \
            libswscale-dev \
            libudev-dev
@@ -800,7 +801,8 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
-            libturbojpeg
+            libturbojpeg \
+            libgammu-dev
      - *checkout
      - *setup-python-default
      - *cache-restore-python-default
@@ -851,6 +853,7 @@ jobs:
            bluez \
            ffmpeg \
            libturbojpeg \
+            libgammu-dev \
            libxml2-utils
      - *checkout
      - *setup-python-matrix
@@ -1230,6 +1233,7 @@ jobs:
            bluez \
            ffmpeg \
            libturbojpeg \
+            libgammu-dev \
            libxml2-utils
      - *checkout
      - *setup-python-matrix
.github/workflows/wheels.yml (vendored) — 2 changed lines
@@ -228,7 +228,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -107,7 +107,6 @@ homeassistant.components.automation.*
homeassistant.components.awair.*
homeassistant.components.axis.*
homeassistant.components.azure_storage.*
-homeassistant.components.backblaze_b2.*
homeassistant.components.backup.*
homeassistant.components.baf.*
homeassistant.components.bang_olufsen.*
@@ -362,7 +361,6 @@ homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*
homeassistant.components.network.*
@@ -396,6 +394,7 @@ homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.pandora.*
homeassistant.components.panel_custom.*
homeassistant.components.paperless_ngx.*
homeassistant.components.peblar.*
CODEOWNERS (generated) — 16 changed lines
@@ -196,8 +196,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/azure_service_bus/ @hfurubotten
/homeassistant/components/azure_storage/ @zweckj
/tests/components/azure_storage/ @zweckj
-/homeassistant/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
-/tests/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
/homeassistant/components/backup/ @home-assistant/core
/tests/components/backup/ @home-assistant/core
/homeassistant/components/baf/ @bdraco @jfroy
@@ -318,6 +316,8 @@ build.json @home-assistant/supervisor
/tests/components/cpuspeed/ @fabaff
/homeassistant/components/crownstone/ @Crownstone @RicArch97
/tests/components/crownstone/ @Crownstone @RicArch97
+/homeassistant/components/cups/ @fabaff
+/tests/components/cups/ @fabaff
/homeassistant/components/cync/ @Kinachi249
/tests/components/cync/ @Kinachi249
/homeassistant/components/daikin/ @fredrike
@@ -510,6 +510,8 @@ build.json @home-assistant/supervisor
/tests/components/fjaraskupan/ @elupus
/homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski
/tests/components/flexit_bacnet/ @lellky @piotrbulinski
+/homeassistant/components/flick_electric/ @ZephireNZ
+/tests/components/flick_electric/ @ZephireNZ
/homeassistant/components/flipr/ @cnico
/tests/components/flipr/ @cnico
/homeassistant/components/flo/ @dmulcahey
@@ -1477,6 +1479,8 @@ build.json @home-assistant/supervisor
/tests/components/smhi/ @gjohansson-ST
/homeassistant/components/smlight/ @tl-sl
/tests/components/smlight/ @tl-sl
+/homeassistant/components/sms/ @ocalvo
+/tests/components/sms/ @ocalvo
/homeassistant/components/snapcast/ @luar123
/tests/components/snapcast/ @luar123
/homeassistant/components/snmp/ @nmaggioni
@@ -1717,8 +1721,8 @@ build.json @home-assistant/supervisor
/tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
/homeassistant/components/valve/ @home-assistant/core
/tests/components/valve/ @home-assistant/core
-/homeassistant/components/vegehub/ @thulrus
-/tests/components/vegehub/ @thulrus
+/homeassistant/components/vegehub/ @ghowevege
+/tests/components/vegehub/ @ghowevege
/homeassistant/components/velbus/ @Cereal2nd @brefra
/tests/components/velbus/ @Cereal2nd @brefra
/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
@@ -1817,8 +1821,8 @@ build.json @home-assistant/supervisor
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
-/homeassistant/components/xbox/ @hunterjm @tr4nt0r
-/tests/components/xbox/ @hunterjm @tr4nt0r
+/homeassistant/components/xbox/ @hunterjm
+/tests/components/xbox/ @hunterjm
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
Dockerfile (generated) — 2 changed lines
@@ -31,7 +31,7 @@ RUN \
    && go2rtc --version

# Install uv
-RUN pip3 install uv==0.9.6
+RUN pip3 install uv==0.9.5

WORKDIR /usr/src
@@ -13,6 +13,7 @@ RUN \
        libavcodec-dev \
        libavdevice-dev \
        libavutil-dev \
+        libgammu-dev \
        libswscale-dev \
        libswresample-dev \
        libavfilter-dev \
@@ -1,7 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
   aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
   amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
 cosign:
   base_identity: https://github.com/home-assistant/docker/.*
   identity: https://github.com/home-assistant/core/.*
@@ -1,5 +1,11 @@
 {
   "domain": "yale",
-  "name": "Yale (non-US/Canada)",
-  "integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
+  "name": "Yale",
+  "integrations": [
+    "august",
+    "yale_smart_alarm",
+    "yalexs_ble",
+    "yale_home",
+    "yale"
+  ]
 }
@@ -1,5 +0,0 @@
{
  "domain": "yale_august",
  "name": "Yale August (US/Canada)",
  "integrations": ["august", "august_ble"]
}
@@ -1,116 +0,0 @@
"""The Backblaze B2 integration."""

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.event import async_track_time_interval

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
)
from .repairs import (
    async_check_for_repair_issues,
    create_bucket_access_restricted_issue,
    create_bucket_not_found_issue,
)

_LOGGER = logging.getLogger(__name__)

type BackblazeConfigEntry = ConfigEntry[Bucket]


async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Set up Backblaze B2 from a config entry."""

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)

    def _authorize_and_get_bucket_sync() -> Bucket:
        """Synchronously authorize the Backblaze B2 account and retrieve the bucket.

        This function runs in the event loop's executor as b2sdk operations are blocking.
        """
        b2_api.authorize_account(
            BACKBLAZE_REALM,
            entry.data[CONF_KEY_ID],
            entry.data[CONF_APPLICATION_KEY],
        )
        return b2_api.get_bucket_by_name(entry.data[CONF_BUCKET])

    try:
        bucket = await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
    except exception.Unauthorized as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_credentials",
        ) from err
    except exception.RestrictedBucket as err:
        create_bucket_access_restricted_issue(hass, entry, err.bucket_name)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="restricted_bucket",
            translation_placeholders={
                "restricted_bucket_name": err.bucket_name,
            },
        ) from err
    except exception.NonExistentBucket as err:
        create_bucket_not_found_issue(hass, entry, entry.data[CONF_BUCKET])
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="invalid_bucket_name",
        ) from err
    except exception.ConnectionReset as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
        ) from err
    except exception.MissingAccountData as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
        ) from err

    entry.runtime_data = bucket

    def _async_notify_backup_listeners() -> None:
        """Notify any registered backup agent listeners."""
        _LOGGER.debug("Notifying backup listeners for entry %s", entry.entry_id)
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    async def _periodic_issue_check(_now: Any) -> None:
        """Periodically check for repair issues."""
        await async_check_for_repair_issues(hass, entry)

    entry.async_on_unload(
        async_track_time_interval(hass, _periodic_issue_check, timedelta(minutes=30))
    )

    hass.async_create_task(async_check_for_repair_issues(hass, entry))

    return True


async def async_unload_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Unload a Backblaze B2 config entry.

    Any resources directly managed by this entry that need explicit shutdown
    would be handled here. In this case, the `async_on_state_change` listener
    handles the notification logic on unload.
    """
    return True
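A minimal, runnable sketch (not from the diff above) of the offloading pattern async_setup_entry relies on: a blocking b2sdk-style call is pushed onto a thread-pool executor so the event loop stays responsive, which is what hass.async_add_executor_job does via loop.run_in_executor. The function and credential names below are placeholders.

import asyncio

def blocking_authorize(realm: str, key_id: str, app_key: str) -> str:
    """Stand-in for the blocking b2_api.authorize_account + get_bucket_by_name calls."""
    return f"bucket-for-{key_id}"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Roughly equivalent to: await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
    bucket = await loop.run_in_executor(None, blocking_authorize, "production", "key", "secret")
    print(bucket)

asyncio.run(main())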
@@ -1,615 +0,0 @@
"""Backup platform for the Backblaze B2 integration."""

import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
import functools
import json
import logging
import mimetypes
from time import time
from typing import Any

from b2sdk.v2 import FileVersion
from b2sdk.v2.exception import B2Error

from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupAgentError,
    BackupNotFound,
    suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.async_iterator import AsyncIteratorReader

from . import BackblazeConfigEntry
from .const import (
    CONF_PREFIX,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
    METADATA_FILE_SUFFIX,
    METADATA_VERSION,
)

_LOGGER = logging.getLogger(__name__)

# Cache TTL for backup list (in seconds)
CACHE_TTL = 300


def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
    """Return the suggested filenames for the backup and metadata files."""
    base_name = suggested_filename(backup).rsplit(".", 1)[0]
    return f"{base_name}.tar", f"{base_name}.metadata.json"


def _parse_metadata(raw_content: str) -> dict[str, Any]:
    """Parse metadata content from JSON."""
    try:
        data = json.loads(raw_content)
    except json.JSONDecodeError as err:
        raise ValueError(f"Invalid JSON format: {err}") from err
    else:
        if not isinstance(data, dict):
            raise TypeError("JSON content is not a dictionary")
        return data


def _find_backup_file_for_metadata(
    metadata_filename: str, all_files: dict[str, FileVersion], prefix: str
) -> FileVersion | None:
    """Find corresponding backup file for metadata file."""
    base_name = metadata_filename[len(prefix) :].removesuffix(METADATA_FILE_SUFFIX)
    return next(
        (
            file
            for name, file in all_files.items()
            if name.startswith(prefix + base_name)
            and name.endswith(".tar")
            and name != metadata_filename
        ),
        None,
    )


def _create_backup_from_metadata(
    metadata_content: dict[str, Any], backup_file: FileVersion
) -> AgentBackup:
    """Construct an AgentBackup from parsed metadata content and the associated backup file."""
    metadata = metadata_content["backup_metadata"]
    metadata["size"] = backup_file.size
    return AgentBackup.from_dict(metadata)
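
# Illustrative note (not part of the original file): the metadata documents the
# helpers above operate on have the shape written by async_upload_backup below:
#
#     {
#         "metadata_version": "1",
#         "backup_id": "abc123",                # example ID
#         "backup_metadata": {...}              # AgentBackup.as_dict(); "size" is
#     }                                         # filled in from the tar file later
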
def handle_b2_errors[T](
    func: Callable[..., Coroutine[Any, Any, T]],
) -> Callable[..., Coroutine[Any, Any, T]]:
    """Handle B2Errors by converting them to BackupAgentError."""

    @functools.wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> T:
        """Catch B2Error and raise BackupAgentError."""
        try:
            return await func(*args, **kwargs)
        except B2Error as err:
            error_msg = f"Failed during {func.__name__}"
            raise BackupAgentError(error_msg) from err

    return wrapper
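
# Illustrative usage sketch (not part of the original file): handle_b2_errors
# wraps an async agent method so any b2sdk B2Error surfaces as Home Assistant's
# BackupAgentError, e.g.:
#
#     @handle_b2_errors
#     async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
#         ...
#
# Callers then only need to handle BackupAgentError, not b2sdk exceptions.
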
async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Return a list of backup agents for all configured Backblaze B2 entries."""
    entries: list[BackblazeConfigEntry] = hass.config_entries.async_loaded_entries(
        DOMAIN
    )
    return [BackblazeBackupAgent(hass, entry) for entry in entries]


@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when backup agents are added or removed.

    :return: A function to unregister the listener.
    """
    hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
        if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
            hass.data.pop(DATA_BACKUP_AGENT_LISTENERS, None)

    return remove_listener


class BackblazeBackupAgent(BackupAgent):
    """Backup agent for Backblaze B2 cloud storage."""

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: BackblazeConfigEntry) -> None:
        """Initialize the Backblaze B2 agent."""
        super().__init__()
        self._hass = hass
        self._bucket = entry.runtime_data
        self._prefix = entry.data[CONF_PREFIX]

        self.name = entry.title
        self.unique_id = entry.entry_id

        self._all_files_cache: dict[str, FileVersion] = {}
        self._all_files_cache_expiration: float = 0.0
        self._backup_list_cache: dict[str, AgentBackup] = {}
        self._backup_list_cache_expiration: float = 0.0

        self._all_files_cache_lock = asyncio.Lock()
        self._backup_list_cache_lock = asyncio.Lock()

    def _is_cache_valid(self, expiration_time: float) -> bool:
        """Check if cache is still valid based on expiration time."""
        return time() <= expiration_time

    async def _cleanup_failed_upload(self, filename: str) -> None:
        """Clean up a partially uploaded file after upload failure."""
        _LOGGER.warning(
            "Attempting to delete partially uploaded main backup file %s "
            "due to metadata upload failure",
            filename,
        )
        try:
            uploaded_main_file_info = await self._hass.async_add_executor_job(
                self._bucket.get_file_info_by_name, filename
            )
            await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
        except B2Error:
            _LOGGER.debug(
                "Failed to clean up partially uploaded main backup file %s. "
                "Manual intervention may be required to delete it from Backblaze B2",
                filename,
                exc_info=True,
            )
        else:
            _LOGGER.debug(
                "Successfully deleted partially uploaded main backup file %s", filename
            )

    async def _get_file_for_download(self, backup_id: str) -> FileVersion:
        """Get backup file for download, raising if not found."""
        file, _ = await self._find_file_and_metadata_version_by_id(backup_id)
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")
        return file

    @handle_b2_errors
    async def async_download_backup(
        self, backup_id: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Download a backup from Backblaze B2."""
        file = await self._get_file_for_download(backup_id)
        _LOGGER.debug("Downloading %s", file.file_name)

        downloaded_file = await self._hass.async_add_executor_job(file.download)
        response = downloaded_file.response

        async def stream_response() -> AsyncIterator[bytes]:
            """Stream the response into an AsyncIterator."""
            try:
                iterator = response.iter_content(chunk_size=1024 * 1024)
                while True:
                    chunk = await self._hass.async_add_executor_job(
                        next, iterator, None
                    )
                    if chunk is None:
                        break
                    yield chunk
            finally:
                _LOGGER.debug("Finished streaming download for %s", file.file_name)

        return stream_response()

    @handle_b2_errors
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup to Backblaze B2.

        This involves uploading the main backup archive and a separate metadata JSON file.
        """
        tar_filename, metadata_filename = suggested_filenames(backup)
        prefixed_tar_filename = self._prefix + tar_filename
        prefixed_metadata_filename = self._prefix + metadata_filename

        metadata_content_bytes = json.dumps(
            {
                "metadata_version": METADATA_VERSION,
                "backup_id": backup.backup_id,
                "backup_metadata": backup.as_dict(),
            }
        ).encode("utf-8")

        _LOGGER.debug(
            "Uploading backup: %s, and metadata: %s",
            prefixed_tar_filename,
            prefixed_metadata_filename,
        )

        upload_successful = False
        try:
            await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
            _LOGGER.debug(
                "Main backup file upload finished for %s", prefixed_tar_filename
            )

            _LOGGER.debug("Uploading metadata file: %s", prefixed_metadata_filename)
            await self._upload_metadata_file(
                metadata_content_bytes, prefixed_metadata_filename
            )
            _LOGGER.debug(
                "Metadata file upload finished for %s", prefixed_metadata_filename
            )
            upload_successful = True
        finally:
            if upload_successful:
                _LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
                self._invalidate_caches(
                    backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
                )
            else:
                await self._cleanup_failed_upload(prefixed_tar_filename)

    def _upload_metadata_file_sync(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Synchronously upload metadata file to B2."""
        self._bucket.upload_bytes(
            metadata_content,
            filename,
            content_type="application/json",
            file_info={"metadata_only": "true"},
        )

    async def _upload_metadata_file(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Upload metadata file to B2."""
        await self._hass.async_add_executor_job(
            self._upload_metadata_file_sync,
            metadata_content,
            filename,
        )

    def _upload_unbound_stream_sync(
        self,
        reader: AsyncIteratorReader,
        filename: str,
        content_type: str,
        file_info: dict[str, Any],
    ) -> FileVersion:
        """Synchronously upload unbound stream to B2."""
        return self._bucket.upload_unbound_stream(
            reader,
            filename,
            content_type=content_type,
            file_info=file_info,
        )

    def _download_and_parse_metadata_sync(
        self, metadata_file_version: FileVersion
    ) -> dict[str, Any]:
        """Synchronously download and parse metadata file."""
        return _parse_metadata(
            metadata_file_version.download().response.content.decode("utf-8")
        )

    async def _upload_backup_file(
        self,
        filename: str,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        file_info: dict[str, Any],
    ) -> None:
        """Upload backup file to B2 using streaming."""
        _LOGGER.debug("Starting streaming upload for %s", filename)

        stream = await open_stream()
        reader = AsyncIteratorReader(self._hass.loop, stream)

        _LOGGER.debug("Uploading backup file %s with streaming", filename)
        try:
            content_type, _ = mimetypes.guess_type(filename)
            file_version = await self._hass.async_add_executor_job(
                self._upload_unbound_stream_sync,
                reader,
                filename,
                content_type or "application/x-tar",
                file_info,
            )
        finally:
            reader.close()

        _LOGGER.debug("Successfully uploaded %s (ID: %s)", filename, file_version.id_)

    @handle_b2_errors
    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup and its associated metadata file from Backblaze B2."""
        file, metadata_file = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")

        # Invariant: when file is not None, metadata_file is also not None
        assert metadata_file is not None

        _LOGGER.debug(
            "Deleting backup file: %s and metadata file: %s",
            file.file_name,
            metadata_file.file_name,
        )

        await self._hass.async_add_executor_job(file.delete)
        await self._hass.async_add_executor_job(metadata_file.delete)

        self._invalidate_caches(
            backup_id,
            file.file_name,
            metadata_file.file_name,
            remove_files=True,
        )

    @handle_b2_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List all backups by finding their associated metadata files in Backblaze B2."""
        async with self._backup_list_cache_lock:
            if self._backup_list_cache and self._is_cache_valid(
                self._backup_list_cache_expiration
            ):
                _LOGGER.debug("Returning backups from cache")
                return list(self._backup_list_cache.values())

            _LOGGER.debug(
                "Cache expired or empty, fetching all files from B2 to build backup list"
            )
            all_files_in_prefix = await self._get_all_files_in_prefix()

            _LOGGER.debug(
                "Files found in prefix '%s': %s",
                self._prefix,
                list(all_files_in_prefix.keys()),
            )

            # Process metadata files sequentially to avoid exhausting executor pool
            backups = {}
            for file_name, file_version in all_files_in_prefix.items():
                if file_name.endswith(METADATA_FILE_SUFFIX):
                    backup = await self._hass.async_add_executor_job(
                        self._process_metadata_file_sync,
                        file_name,
                        file_version,
                        all_files_in_prefix,
                    )
                    if backup:
                        backups[backup.backup_id] = backup
            self._backup_list_cache = backups
            self._backup_list_cache_expiration = time() + CACHE_TTL

            return list(backups.values())

    @handle_b2_errors
    async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
        """Get a specific backup by its ID from Backblaze B2."""
        if self._backup_list_cache and self._is_cache_valid(
            self._backup_list_cache_expiration
        ):
            if backup := self._backup_list_cache.get(backup_id):
                _LOGGER.debug("Returning backup %s from cache", backup_id)
                return backup

        file, metadata_file_version = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file or not metadata_file_version:
            raise BackupNotFound(f"Backup {backup_id} not found")

        metadata_content = await self._hass.async_add_executor_job(
            self._download_and_parse_metadata_sync,
            metadata_file_version,
        )

        _LOGGER.debug(
            "Successfully retrieved metadata for backup ID %s from file %s",
            backup_id,
            metadata_file_version.file_name,
        )
        backup = _create_backup_from_metadata(metadata_content, file)

        if self._is_cache_valid(self._backup_list_cache_expiration):
            self._backup_list_cache[backup.backup_id] = backup

        return backup

    async def _find_file_and_metadata_version_by_id(
        self, backup_id: str
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Find the main backup file and its associated metadata file version by backup ID."""
        all_files_in_prefix = await self._get_all_files_in_prefix()

        # Process metadata files sequentially to avoid exhausting executor pool
        for file_name, file_version in all_files_in_prefix.items():
            if file_name.endswith(METADATA_FILE_SUFFIX):
                (
                    result_backup_file,
                    result_metadata_file_version,
                ) = await self._hass.async_add_executor_job(
                    self._process_metadata_file_for_id_sync,
                    file_name,
                    file_version,
                    backup_id,
                    all_files_in_prefix,
                )
                if result_backup_file and result_metadata_file_version:
                    return result_backup_file, result_metadata_file_version

        _LOGGER.debug("Backup %s not found", backup_id)
        return None, None

    def _process_metadata_file_for_id_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        target_backup_id: str,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Synchronously process a single metadata file for a specific backup ID.

        Called within a thread pool executor.
        """
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning(
                "Failed to download metadata file %s during ID search: %s",
                file_name,
                err,
            )
            return None, None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None, None

        if metadata_content["backup_id"] != target_backup_id:
            _LOGGER.debug(
                "Metadata file %s does not match target backup ID %s",
                file_name,
                target_backup_id,
            )
            return None, None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s for backup ID %s, but no corresponding backup file",
                file_name,
                target_backup_id,
            )
            return None, None

        _LOGGER.debug(
            "Found backup file %s and metadata file %s for ID %s",
            found_backup_file.file_name,
            file_name,
            target_backup_id,
        )
        return found_backup_file, file_version

    async def _get_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Get all file versions in the configured prefix from Backblaze B2.

        Uses a cache to minimize API calls.

        This fetches a flat list of all files, including main backups and metadata files.
        """
        async with self._all_files_cache_lock:
            if self._is_cache_valid(self._all_files_cache_expiration):
                _LOGGER.debug("Returning all files from cache")
                return self._all_files_cache

            _LOGGER.debug("Cache for all files expired or empty, fetching from B2")
            all_files_in_prefix = await self._hass.async_add_executor_job(
                self._fetch_all_files_in_prefix
            )
            self._all_files_cache = all_files_in_prefix
            self._all_files_cache_expiration = time() + CACHE_TTL
            return all_files_in_prefix

    def _fetch_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Fetch all files in the configured prefix from B2."""
        all_files: dict[str, FileVersion] = {}
        for file, _ in self._bucket.ls(self._prefix):
            all_files[file.file_name] = file
        return all_files

    def _process_metadata_file_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> AgentBackup | None:
        """Synchronously process a single metadata file and return an AgentBackup if valid."""
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning("Failed to download metadata file %s: %s", file_name, err)
            return None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s but no corresponding backup file",
                file_name,
            )
            return None

        _LOGGER.debug(
            "Successfully processed metadata file %s for backup ID %s",
            file_name,
            metadata_content["backup_id"],
        )
        return _create_backup_from_metadata(metadata_content, found_backup_file)

    def _invalidate_caches(
        self,
        backup_id: str,
        tar_filename: str,
        metadata_filename: str | None,
        *,
        remove_files: bool = False,
    ) -> None:
        """Invalidate caches after upload/deletion operations.

        Args:
            backup_id: The backup ID to remove from backup cache
            tar_filename: The tar filename to remove from files cache
            metadata_filename: The metadata filename to remove from files cache
            remove_files: If True, remove specific files from cache; if False, expire entire cache

        """
        if remove_files:
            if self._is_cache_valid(self._all_files_cache_expiration):
                self._all_files_cache.pop(tar_filename, None)
                if metadata_filename:
                    self._all_files_cache.pop(metadata_filename, None)

            if self._is_cache_valid(self._backup_list_cache_expiration):
                self._backup_list_cache.pop(backup_id, None)
        else:
            # For uploads, we can't easily add new FileVersion objects without API calls,
            # so we expire the entire cache for simplicity
            self._all_files_cache_expiration = 0.0
            self._backup_list_cache_expiration = 0.0
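A small self-contained sketch (stdlib only, names invented) of the TTL-cache discipline used above: a cache is trusted while time() <= expiration, refreshed wholesale with time() + CACHE_TTL, and expired by zeroing the deadline, mirroring _is_cache_valid, async_list_backups, and _invalidate_caches.

from time import time

CACHE_TTL = 300  # seconds, as in the original file

class TtlCache:
    def __init__(self) -> None:
        self.data: dict[str, str] = {}
        self.expiration: float = 0.0  # 0.0 means expired, as _invalidate_caches sets it

    def is_valid(self) -> bool:
        # Same check as _is_cache_valid: trusted while now <= deadline.
        return time() <= self.expiration

    def store(self, items: dict[str, str]) -> None:
        # Wholesale refresh, as async_list_backups does after fetching from B2.
        self.data = items
        self.expiration = time() + CACHE_TTL

cache = TtlCache()
cache.store({"backup-1": "metadata"})
assert cache.is_valid()
cache.expiration = 0.0  # what _invalidate_caches does after an upload
assert not cache.is_valid()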
@@ -1,288 +0,0 @@
"""Config flow for the Backblaze B2 integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    CONF_PREFIX,
    DOMAIN,
)

_LOGGER = logging.getLogger(__name__)

# Constants
REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_KEY_ID): cv.string,
        vol.Required(CONF_APPLICATION_KEY): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Required(CONF_BUCKET): cv.string,
        vol.Optional(CONF_PREFIX, default=""): cv.string,
    }
)


class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Backblaze B2."""

    VERSION = 1

    reauth_entry: ConfigEntry[Any] | None

    def _abort_if_duplicate_credentials(self, user_input: dict[str, Any]) -> None:
        """Abort if credentials already exist in another entry."""
        self._async_abort_entries_match(
            {
                CONF_KEY_ID: user_input[CONF_KEY_ID],
                CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
            }
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user."""
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            errors, placeholders = await self._async_validate_backblaze_connection(
                user_input
            )

            if not errors:
                if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
                    "/"
                ):
                    user_input[CONF_PREFIX] += "/"

                return self.async_create_entry(
                    title=user_input[CONF_BUCKET], data=user_input
                )

        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input
            ),
            errors=errors,
            description_placeholders={"brand_name": "Backblaze B2", **placeholders},
        )

    async def _async_validate_backblaze_connection(
        self, user_input: dict[str, Any]
    ) -> tuple[dict[str, str], dict[str, str]]:
        """Validate Backblaze B2 credentials, bucket, capabilities, and prefix.

        Returns a tuple of (errors_dict, placeholders_dict).
        """
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        info = InMemoryAccountInfo()
        b2_api = B2Api(info)

        def _authorize_and_get_bucket_sync() -> None:
            """Synchronously authorize the account and get the bucket by name.

            This function is run in the executor because b2sdk operations are blocking.
            """
            b2_api.authorize_account(
                BACKBLAZE_REALM,  # Use the defined realm constant
                user_input[CONF_KEY_ID],
                user_input[CONF_APPLICATION_KEY],
            )
            b2_api.get_bucket_by_name(user_input[CONF_BUCKET])

        try:
            await self.hass.async_add_executor_job(_authorize_and_get_bucket_sync)

            allowed = b2_api.account_info.get_allowed()

            # Check if allowed info is available
            if allowed is None or not allowed.get("capabilities"):
                errors["base"] = "invalid_capability"
                placeholders["missing_capabilities"] = ", ".join(
                    sorted(REQUIRED_CAPABILITIES)
                )
            else:
                # Check if all required capabilities are present
                current_caps = set(allowed["capabilities"])
                if not REQUIRED_CAPABILITIES.issubset(current_caps):
                    missing_caps = REQUIRED_CAPABILITIES - current_caps
                    _LOGGER.warning(
                        "Missing required Backblaze B2 capabilities for Key ID '%s': %s",
                        user_input[CONF_KEY_ID],
                        ", ".join(sorted(missing_caps)),
                    )
                    errors["base"] = "invalid_capability"
                    placeholders["missing_capabilities"] = ", ".join(
                        sorted(missing_caps)
                    )
                else:
                    # Only check prefix if capabilities are valid
                    configured_prefix: str = user_input[CONF_PREFIX]
                    allowed_prefix = allowed.get("namePrefix") or ""
                    # Ensure configured prefix starts with Backblaze B2's allowed prefix
                    if allowed_prefix and not configured_prefix.startswith(
                        allowed_prefix
                    ):
                        errors[CONF_PREFIX] = "invalid_prefix"
                        placeholders["allowed_prefix"] = allowed_prefix

        except exception.Unauthorized:
            _LOGGER.debug(
                "Backblaze B2 authentication failed for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "invalid_credentials"
        except exception.RestrictedBucket as err:
            _LOGGER.debug(
                "Access to Backblaze B2 bucket '%s' is restricted: %s",
                user_input[CONF_BUCKET],
                err,
            )
            placeholders["restricted_bucket_name"] = err.bucket_name
            errors[CONF_BUCKET] = "restricted_bucket"
        except exception.NonExistentBucket:
            _LOGGER.debug(
                "Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
            )
            errors[CONF_BUCKET] = "invalid_bucket_name"
        except exception.ConnectionReset:
            _LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
            errors["base"] = "cannot_connect"
        except exception.MissingAccountData:
            # This generally indicates an issue with how InMemoryAccountInfo is used
            _LOGGER.error(
                "Missing account data during Backblaze B2 authorization for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "invalid_credentials"
        except Exception:
            _LOGGER.exception(
                "An unexpected error occurred during Backblaze B2 configuration for Key ID '%s'",
                user_input[CONF_KEY_ID],
            )
            errors["base"] = "unknown"

        return errors, placeholders

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauthentication flow."""
        self.reauth_entry = self.hass.config_entries.async_get_entry(
            self.context["entry_id"]
        )
        assert self.reauth_entry is not None
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication."""
        assert self.reauth_entry is not None
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            validation_input = {
                CONF_KEY_ID: user_input[CONF_KEY_ID],
                CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                CONF_BUCKET: self.reauth_entry.data[CONF_BUCKET],
                CONF_PREFIX: self.reauth_entry.data[CONF_PREFIX],
            }

            errors, placeholders = await self._async_validate_backblaze_connection(
                validation_input
            )

            if not errors:
                return self.async_update_reload_and_abort(
                    self.reauth_entry,
                    data_updates={
                        CONF_KEY_ID: user_input[CONF_KEY_ID],
                        CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                    },
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_KEY_ID): cv.string,
                    vol.Required(CONF_APPLICATION_KEY): TextSelector(
                        config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
                    ),
                }
            ),
            errors=errors,
            description_placeholders={
                "brand_name": "Backblaze B2",
                "bucket": self.reauth_entry.data[CONF_BUCKET],
                **placeholders,
            },
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reconfiguration flow."""
        entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
        assert entry is not None

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            errors, placeholders = await self._async_validate_backblaze_connection(
                user_input
            )

            if not errors:
                if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
                    "/"
                ):
                    user_input[CONF_PREFIX] += "/"

                return self.async_update_reload_and_abort(
                    entry,
                    data_updates=user_input,
                )
        else:
            errors = {}
            placeholders = {}

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input or entry.data
            ),
            errors=errors,
            description_placeholders={"brand_name": "Backblaze B2", **placeholders},
        )
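A runnable sketch (not part of the removed file; helper names invented) isolating the two pure validation rules the flow applies: the required-capability subset check and the trailing-slash prefix normalization.

REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}

def missing_capabilities(granted: set[str]) -> set[str]:
    # Same set logic as the flow: everything required must be granted.
    return REQUIRED_CAPABILITIES - granted

def normalize_prefix(prefix: str) -> str:
    # The flow appends "/" so object keys group under a folder-like prefix.
    return prefix + "/" if prefix and not prefix.endswith("/") else prefix

assert missing_capabilities({"writeFiles", "listFiles"}) == {"deleteFiles", "readFiles"}
assert normalize_prefix("backups") == "backups/"
assert normalize_prefix("") == ""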
@@ -1,22 +0,0 @@
"""Constants for the Backblaze B2 integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "backblaze_b2"

CONF_KEY_ID = "key_id"
CONF_APPLICATION_KEY = "application_key"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)

METADATA_FILE_SUFFIX = ".metadata.json"
METADATA_VERSION = "1"

BACKBLAZE_REALM = "production"
@@ -1,56 +0,0 @@
"""Diagnostics support for Backblaze B2."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant

from . import BackblazeConfigEntry
from .const import CONF_APPLICATION_KEY, CONF_KEY_ID

TO_REDACT_ENTRY_DATA = {CONF_APPLICATION_KEY, CONF_KEY_ID}
TO_REDACT_ACCOUNT_DATA_ALLOWED = {"bucketId", "bucketName", "namePrefix"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BackblazeConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    bucket = entry.runtime_data

    try:
        bucket_info = {
            "name": bucket.name,
            "id": bucket.id_,
            "type": bucket.type_,
            "cors_rules": bucket.cors_rules,
            "lifecycle_rules": bucket.lifecycle_rules,
            "revision": bucket.revision,
        }

        account_info = bucket.api.account_info
        account_data: dict[str, Any] = {
            "account_id": account_info.get_account_id(),
            "api_url": account_info.get_api_url(),
            "download_url": account_info.get_download_url(),
            "minimum_part_size": account_info.get_minimum_part_size(),
            "allowed": account_info.get_allowed(),
        }

        if isinstance(account_data["allowed"], dict):
            account_data["allowed"] = async_redact_data(
                account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
            )

    except (AttributeError, TypeError, ValueError, KeyError):
        bucket_info = {"name": "unknown", "id": "unknown"}
        account_data = {"error": "Failed to retrieve detailed account information"}

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_ENTRY_DATA),
        "entry_options": entry.options,
        "bucket_info": bucket_info,
        "account_info": account_data,
    }
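A minimal stand-in (illustrative only; Home Assistant's real async_redact_data is recursive and uses a redaction marker) showing the idea behind the key-based redaction above.

def redact(data: dict, to_redact: set[str]) -> dict:
    # Replace values of sensitive keys; leave everything else untouched.
    return {k: ("**REDACTED**" if k in to_redact else v) for k, v in data.items()}

assert redact({"key_id": "secret", "bucket": "b"}, {"key_id"}) == {
    "key_id": "**REDACTED**",
    "bucket": "b",
}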
@@ -1,12 +0,0 @@
{
  "domain": "backblaze_b2",
  "name": "Backblaze B2",
  "codeowners": ["@hugo-vrijswijk", "@ElCruncharino"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/backblaze_b2",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["b2sdk"],
  "quality_scale": "bronze",
  "requirements": ["b2sdk==2.8.1"]
}
@@ -1,124 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: Integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: This integration does not poll.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration does not have entities.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  discovery:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  docs-data-update:
    status: exempt
    comment: This integration does not poll.
  docs-examples:
    status: exempt
    comment: The integration extends core functionality and does not require examples.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: This integration does not support physical devices.
  docs-supported-functions:
    status: exempt
    comment: This integration does not have entities.
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration does not have devices.
  entity-category:
    status: exempt
    comment: This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not have entities.
  entity-translations:
    status: exempt
    comment: This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: This integration does not use icons.
  reconfiguration-flow: done
  repair-issues: done
  stale-devices:
    status: exempt
    comment: This integration does not have devices.

  # Platinum
  async-dependency:
    status: exempt
    comment: |
      The b2sdk library is synchronous by design. All sync operations are properly
      wrapped with async_add_executor_job to prevent blocking the event loop.
  inject-websession:
    status: exempt
    comment: |
      The b2sdk library does not support custom HTTP session injection.
      It manages HTTP connections internally through its own session management.
  strict-typing:
    status: exempt
    comment: |
      The b2sdk dependency does not include a py.typed file and is not PEP 561 compliant.
      This is outside the integration's control as it's a third-party library requirement.
@@ -1,93 +0,0 @@
"""Repair issues for the Backblaze B2 integration."""

from __future__ import annotations

import logging

from b2sdk.v2.exception import (
    B2Error,
    NonExistentBucket,
    RestrictedBucket,
    Unauthorized,
)

from homeassistant.components.repairs import ConfirmRepairFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from .const import CONF_BUCKET, DOMAIN

_LOGGER = logging.getLogger(__name__)

ISSUE_BUCKET_ACCESS_RESTRICTED = "bucket_access_restricted"
ISSUE_BUCKET_NOT_FOUND = "bucket_not_found"


def _create_issue(
    hass: HomeAssistant,
    entry: ConfigEntry,
    issue_type: str,
    bucket_name: str,
) -> None:
    """Create a repair issue with standard parameters."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        f"{issue_type}_{entry.entry_id}",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.ERROR,
        translation_key=issue_type,
        translation_placeholders={
            "brand_name": "Backblaze B2",
            "title": entry.title,
            "bucket_name": bucket_name,
            "entry_id": entry.entry_id,
        },
    )


def create_bucket_access_restricted_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for restricted bucket access."""
    _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, bucket_name)


def create_bucket_not_found_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for non-existent bucket."""
    _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, bucket_name)


async def async_check_for_repair_issues(
    hass: HomeAssistant, entry: ConfigEntry
) -> None:
    """Check for common issues that require user action."""
    bucket = entry.runtime_data
    restricted_issue_id = f"{ISSUE_BUCKET_ACCESS_RESTRICTED}_{entry.entry_id}"
    not_found_issue_id = f"{ISSUE_BUCKET_NOT_FOUND}_{entry.entry_id}"

    try:
        await hass.async_add_executor_job(bucket.api.account_info.get_allowed)
        ir.async_delete_issue(hass, DOMAIN, restricted_issue_id)
        ir.async_delete_issue(hass, DOMAIN, not_found_issue_id)
    except Unauthorized:
        entry.async_start_reauth(hass)
    except RestrictedBucket as err:
        _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, err.bucket_name)
    except NonExistentBucket:
        _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, entry.data[CONF_BUCKET])
    except B2Error as err:
        _LOGGER.debug("B2 connectivity test failed: %s", err)


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> ConfirmRepairFlow:
    """Create a fix flow for Backblaze B2 issues."""
    return ConfirmRepairFlow()
@@ -1,92 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
      "invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
      "invalid_credentials": "[%key:component::backblaze_b2::exceptions::invalid_credentials::message%]",
      "invalid_prefix": "[%key:component::backblaze_b2::exceptions::invalid_prefix::message%]",
      "restricted_bucket": "[%key:component::backblaze_b2::exceptions::restricted_bucket::message%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "application_key": "Application key",
          "key_id": "Key ID"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "key_id": "Key ID to connect to {brand_name}"
        },
        "description": "Update your {brand_name} credentials for bucket {bucket}.",
        "title": "Reauthenticate {brand_name}"
      },
      "reconfigure": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Reconfigure {brand_name}"
      },
      "user": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Add {brand_name} backup"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to endpoint"
    },
    "invalid_bucket_name": {
      "message": "Bucket does not exist or is not writable by the provided credentials."
    },
    "invalid_capability": {
      "message": "Application key does not have the required read/write capabilities."
    },
    "invalid_credentials": {
      "message": "Bucket cannot be accessed using the provided key ID and application key."
    },
    "invalid_prefix": {
      "message": "Prefix is not allowed for the provided key. Must start with {allowed_prefix}."
    },
    "restricted_bucket": {
      "message": "Application key is restricted to bucket {restricted_bucket_name}."
    }
  },
  "issues": {
    "bucket_access_restricted": {
      "description": "Access to your {brand_name} bucket {bucket_name} is restricted for the current credentials. This means your application key may only have access to specific buckets, but not this one. To fix this issue:\n\n1. Log in to your {brand_name} account\n2. Check your application key restrictions\n3. Either use a different bucket that your key can access, or create a new application key with access to {bucket_name}\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket access restricted"
    },
    "bucket_not_found": {
      "description": "The {brand_name} bucket {bucket_name} cannot be found or accessed. This could mean:\n\n1. The bucket was deleted\n2. The bucket name was changed\n3. Your credentials no longer have access to this bucket\n\nTo fix this issue:\n\n1. Log in to your {brand_name} account\n2. Verify the bucket still exists and check its name\n3. Ensure your application key has access to this bucket\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket not found"
    }
  }
}
@@ -8,6 +8,6 @@
   "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.7", "securetar==2025.2.1"],
+  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
   "single_config_entry": true
 }
@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -27,9 +27,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

ATTR_COUNTER = "counter"
ATTR_REMAINING_PAGES = "remaining_pages"
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
             return HVACAction.HEATING
         raise HomeAssistantError(
             translation_domain=DOMAIN,
-            translation_key="failed_to_parse_hvac_action",
+            translation_key="failed_to_parse_hvac_mode",
             translation_placeholders={
                 "mode_and_active": mode_and_active,
                 "current_temperature": str(self.current_temperature),
@@ -24,7 +24,7 @@
   },
   "exceptions": {
     "failed_to_parse_hvac_action": {
-      "message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
+      "message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
     },
     "failed_to_parse_hvac_mode": {
       "message": "Cannot parse response to HVACMode: {mode}"
4 homeassistant/components/cups/__init__.py Normal file
@@ -0,0 +1,4 @@
"""The cups component."""

DOMAIN = "cups"
CONF_PRINTERS = "printers"

9 homeassistant/components/cups/manifest.json Normal file
@@ -0,0 +1,9 @@
{
  "domain": "cups",
  "name": "CUPS",
  "codeowners": ["@fabaff"],
  "documentation": "https://www.home-assistant.io/integrations/cups",
  "iot_class": "local_polling",
  "quality_scale": "legacy",
  "requirements": ["pycups==2.0.4"]
}

349 homeassistant/components/cups/sensor.py Normal file
@@ -0,0 +1,349 @@
"""Details about printers which are connected to CUPS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import importlib
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, PERCENTAGE
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_PRINTERS, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_MARKER_TYPE = "marker_type"
|
||||
ATTR_MARKER_LOW_LEVEL = "marker_low_level"
|
||||
ATTR_MARKER_HIGH_LEVEL = "marker_high_level"
|
||||
ATTR_PRINTER_NAME = "printer_name"
|
||||
ATTR_DEVICE_URI = "device_uri"
|
||||
ATTR_PRINTER_INFO = "printer_info"
|
||||
ATTR_PRINTER_IS_SHARED = "printer_is_shared"
|
||||
ATTR_PRINTER_LOCATION = "printer_location"
|
||||
ATTR_PRINTER_MODEL = "printer_model"
|
||||
ATTR_PRINTER_STATE_MESSAGE = "printer_state_message"
|
||||
ATTR_PRINTER_STATE_REASON = "printer_state_reason"
|
||||
ATTR_PRINTER_TYPE = "printer_type"
|
||||
ATTR_PRINTER_URI_SUPPORTED = "printer_uri_supported"
|
||||
|
||||
CONF_IS_CUPS_SERVER = "is_cups_server"
|
||||
|
||||
DEFAULT_HOST = "127.0.0.1"
|
||||
DEFAULT_PORT = 631
|
||||
DEFAULT_IS_CUPS_SERVER = True
|
||||
|
||||
ICON_PRINTER = "mdi:printer"
|
||||
ICON_MARKER = "mdi:water"
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
PRINTER_STATES = {3: "idle", 4: "printing", 5: "stopped"}
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PRINTERS): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_IS_CUPS_SERVER, default=DEFAULT_IS_CUPS_SERVER): cv.boolean,
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the CUPS sensor."""
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
printers: list[str] = config[CONF_PRINTERS]
|
||||
is_cups: bool = config[CONF_IS_CUPS_SERVER]
|
||||
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "CUPS",
|
||||
},
|
||||
)
|
||||
|
||||
if is_cups:
|
||||
data = CupsData(host, port, None)
|
||||
data.update()
|
||||
if data.available is False:
|
||||
_LOGGER.error("Unable to connect to CUPS server: %s:%s", host, port)
|
||||
raise PlatformNotReady
|
||||
assert data.printers is not None
|
||||
|
||||
dev: list[SensorEntity] = []
|
||||
for printer in printers:
|
||||
if printer not in data.printers:
|
||||
_LOGGER.error("Printer is not present: %s", printer)
|
||||
continue
|
||||
dev.append(CupsSensor(data, printer))
|
||||
|
||||
if "marker-names" in data.attributes[printer]:
|
||||
dev.extend(
|
||||
MarkerSensor(data, printer, marker, True)
|
||||
for marker in data.attributes[printer]["marker-names"]
|
||||
)
|
||||
|
||||
add_entities(dev, True)
|
||||
return
|
||||
|
||||
data = CupsData(host, port, printers)
|
||||
data.update()
|
||||
if data.available is False:
|
||||
_LOGGER.error("Unable to connect to IPP printer: %s:%s", host, port)
|
||||
raise PlatformNotReady
|
||||
|
||||
dev = []
|
||||
for printer in printers:
|
||||
dev.append(IPPSensor(data, printer))
|
||||
|
||||
if "marker-names" in data.attributes[printer]:
|
||||
for marker in data.attributes[printer]["marker-names"]:
|
||||
dev.append(MarkerSensor(data, printer, marker, False))
|
||||
|
||||
add_entities(dev, True)
|
||||
|
||||
|
||||
class CupsSensor(SensorEntity):
|
||||
"""Representation of a CUPS sensor."""
|
||||
|
||||
_attr_icon = ICON_PRINTER
|
||||
|
||||
def __init__(self, data: CupsData, printer_name: str) -> None:
|
||||
"""Initialize the CUPS sensor."""
|
||||
self.data = data
|
||||
self._name = printer_name
|
||||
self._printer: dict[str, Any] | None = None
|
||||
self._attr_available = False
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the entity."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._printer is None:
|
||||
return None
|
||||
|
||||
key = self._printer["printer-state"]
|
||||
return PRINTER_STATES.get(key, key)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._printer is None:
|
||||
return None
|
||||
|
||||
return {
|
||||
ATTR_DEVICE_URI: self._printer["device-uri"],
|
||||
ATTR_PRINTER_INFO: self._printer["printer-info"],
|
||||
ATTR_PRINTER_IS_SHARED: self._printer["printer-is-shared"],
|
||||
ATTR_PRINTER_LOCATION: self._printer["printer-location"],
|
||||
ATTR_PRINTER_MODEL: self._printer["printer-make-and-model"],
|
||||
ATTR_PRINTER_STATE_MESSAGE: self._printer["printer-state-message"],
|
||||
ATTR_PRINTER_STATE_REASON: self._printer["printer-state-reasons"],
|
||||
ATTR_PRINTER_TYPE: self._printer["printer-type"],
|
||||
ATTR_PRINTER_URI_SUPPORTED: self._printer["printer-uri-supported"],
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data and updates the states."""
|
||||
self.data.update()
|
||||
assert self.data.printers is not None
|
||||
self._printer = self.data.printers.get(self.name)
|
||||
self._attr_available = self.data.available
|
||||
|
||||
|
||||
class IPPSensor(SensorEntity):
|
||||
"""Implementation of the IPPSensor.
|
||||
|
||||
This sensor represents the status of the printer.
|
||||
"""
|
||||
|
||||
_attr_icon = ICON_PRINTER
|
||||
|
||||
def __init__(self, data: CupsData, printer_name: str) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.data = data
|
||||
self._printer_name = printer_name
|
||||
self._attributes = None
|
||||
self._attr_available = False
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._attributes["printer-make-and-model"]
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
key = self._attributes["printer-state"]
|
||||
return PRINTER_STATES.get(key, key)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
state_attributes = {}
|
||||
|
||||
if "printer-info" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_INFO] = self._attributes["printer-info"]
|
||||
|
||||
if "printer-location" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_LOCATION] = self._attributes[
|
||||
"printer-location"
|
||||
]
|
||||
|
||||
if "printer-state-message" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_STATE_MESSAGE] = self._attributes[
|
||||
"printer-state-message"
|
||||
]
|
||||
|
||||
if "printer-state-reasons" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_STATE_REASON] = self._attributes[
|
||||
"printer-state-reasons"
|
||||
]
|
||||
|
||||
if "printer-uri-supported" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_URI_SUPPORTED] = self._attributes[
|
||||
"printer-uri-supported"
|
||||
]
|
||||
|
||||
return state_attributes
|
||||
|
||||
def update(self) -> None:
|
||||
"""Fetch new state data for the sensor."""
|
||||
self.data.update()
|
||||
self._attributes = self.data.attributes.get(self._printer_name)
|
||||
self._attr_available = self.data.available
|
||||
|
||||
|
||||
class MarkerSensor(SensorEntity):
|
||||
"""Implementation of the MarkerSensor.
|
||||
|
||||
This sensor represents the percentage of ink or toner.
|
||||
"""
|
||||
|
||||
_attr_icon = ICON_MARKER
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
|
||||
def __init__(self, data: CupsData, printer: str, name: str, is_cups: bool) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.data = data
|
||||
self._attr_name = name
|
||||
self._printer = printer
|
||||
self._index = data.attributes[printer]["marker-names"].index(name)
|
||||
self._is_cups = is_cups
|
||||
self._attributes: dict[str, Any] | None = None
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
return self._attributes[self._printer]["marker-levels"][self._index]
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
high_level = self._attributes[self._printer].get("marker-high-levels")
|
||||
if isinstance(high_level, list):
|
||||
high_level = high_level[self._index]
|
||||
|
||||
low_level = self._attributes[self._printer].get("marker-low-levels")
|
||||
if isinstance(low_level, list):
|
||||
low_level = low_level[self._index]
|
||||
|
||||
marker_types = self._attributes[self._printer]["marker-types"]
|
||||
if isinstance(marker_types, list):
|
||||
marker_types = marker_types[self._index]
|
||||
|
||||
if self._is_cups:
|
||||
printer_name = self._printer
|
||||
else:
|
||||
printer_name = self._attributes[self._printer]["printer-make-and-model"]
|
||||
|
||||
return {
|
||||
ATTR_MARKER_HIGH_LEVEL: high_level,
|
||||
ATTR_MARKER_LOW_LEVEL: low_level,
|
||||
ATTR_MARKER_TYPE: marker_types,
|
||||
ATTR_PRINTER_NAME: printer_name,
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state of the sensor."""
|
||||
# Data fetching is done by CupsSensor/IPPSensor
|
||||
self._attributes = self.data.attributes
|
||||
|
||||
|
||||
class CupsData:
|
||||
"""Get the latest data from CUPS and update the state."""
|
||||
|
||||
def __init__(self, host: str, port: int, ipp_printers: list[str] | None) -> None:
|
||||
"""Initialize the data object."""
|
||||
self._host = host
|
||||
self._port = port
|
||||
self._ipp_printers = ipp_printers
|
||||
self.is_cups = ipp_printers is None
|
||||
self.printers: dict[str, dict[str, Any]] | None = None
|
||||
self.attributes: dict[str, Any] = {}
|
||||
self.available = False
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from CUPS."""
|
||||
cups = importlib.import_module("cups")
|
||||
|
||||
try:
|
||||
conn = cups.Connection(host=self._host, port=self._port)
|
||||
if self.is_cups:
|
||||
self.printers = conn.getPrinters()
|
||||
assert self.printers is not None
|
||||
for printer in self.printers:
|
||||
self.attributes[printer] = conn.getPrinterAttributes(name=printer)
|
||||
else:
|
||||
assert self._ipp_printers is not None
|
||||
for ipp_printer in self._ipp_printers:
|
||||
self.attributes[ipp_printer] = conn.getPrinterAttributes(
|
||||
uri=f"ipp://{self._host}:{self._port}/{ipp_printer}"
|
||||
)
|
||||
|
||||
self.available = True
|
||||
except RuntimeError:
|
||||
self.available = False
|
||||
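As a quick illustration of how the PLATFORM_SCHEMA above behaves, validating a minimal user config fills in the declared defaults. A sketch only; the host and printer name are invented for the example:

# Minimal sketch: voluptuous fills in the schema defaults declared above.
config = PLATFORM_SCHEMA(
    {
        "platform": "cups",
        "printers": ["EPSON_XP_410"],  # invented printer name
        "host": "192.168.1.31",        # invented host
    }
)
assert config["port"] == 631  # DEFAULT_PORT
assert config["is_cups_server"] is True  # DEFAULT_IS_CUPS_SERVER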
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "quality_scale": "bronze",
-  "requirements": ["pycync==0.4.3"]
+  "requirements": ["pycync==0.4.2"]
 }
3 homeassistant/components/decora/__init__.py Normal file
@@ -0,0 +1,3 @@
"""The decora component."""

DOMAIN = "decora"

166 homeassistant/components/decora/light.py Normal file
@@ -0,0 +1,166 @@
"""Support for Decora dimmers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import copy
|
||||
from functools import wraps
|
||||
import logging
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Any, Concatenate
|
||||
|
||||
from bluepy.btle import BTLEException
|
||||
import decora
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import util
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS,
|
||||
PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_DEVICES, CONF_NAME
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _name_validator(config):
|
||||
"""Validate the name."""
|
||||
config = copy.deepcopy(config)
|
||||
for address, device_config in config[CONF_DEVICES].items():
|
||||
if CONF_NAME not in device_config:
|
||||
device_config[CONF_NAME] = util.slugify(address)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
DEVICE_SCHEMA = vol.Schema(
|
||||
{vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_API_KEY): cv.string}
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = vol.Schema(
|
||||
vol.All(
|
||||
LIGHT_PLATFORM_SCHEMA.extend(
|
||||
{vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}}
|
||||
),
|
||||
_name_validator,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def retry[_DecoraLightT: DecoraLight, **_P, _R](
|
||||
method: Callable[Concatenate[_DecoraLightT, _P], _R],
|
||||
) -> Callable[Concatenate[_DecoraLightT, _P], _R | None]:
|
||||
"""Retry bluetooth commands."""
|
||||
|
||||
@wraps(method)
|
||||
def wrapper_retry(
|
||||
device: _DecoraLightT, *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> _R | None:
|
||||
"""Try send command and retry on error."""
|
||||
|
||||
initial = time.monotonic()
|
||||
while True:
|
||||
if time.monotonic() - initial >= 10:
|
||||
return None
|
||||
try:
|
||||
return method(device, *args, **kwargs)
|
||||
except (decora.decoraException, AttributeError, BTLEException):
|
||||
_LOGGER.warning(
|
||||
"Decora connect error for device %s. Reconnecting",
|
||||
device.name,
|
||||
)
|
||||
device._switch.connect() # noqa: SLF001
|
||||
|
||||
return wrapper_retry
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up an Decora switch."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Leviton Decora",
|
||||
},
|
||||
)
|
||||
|
||||
lights = []
|
||||
for address, device_config in config[CONF_DEVICES].items():
|
||||
device = {}
|
||||
device["name"] = device_config[CONF_NAME]
|
||||
device["key"] = device_config[CONF_API_KEY]
|
||||
device["address"] = address
|
||||
light = DecoraLight(device)
|
||||
lights.append(light)
|
||||
|
||||
add_entities(lights)
|
||||
|
||||
|
||||
class DecoraLight(LightEntity):
|
||||
"""Representation of an Decora light."""
|
||||
|
||||
_attr_color_mode = ColorMode.BRIGHTNESS
|
||||
_attr_supported_color_modes = {ColorMode.BRIGHTNESS}
|
||||
|
||||
def __init__(self, device: dict[str, Any]) -> None:
|
||||
"""Initialize the light."""
|
||||
|
||||
self._attr_name = device["name"]
|
||||
self._attr_unique_id = device["address"]
|
||||
self._key = device["key"]
|
||||
self._switch = decora.decora(device["address"], self._key)
|
||||
self._attr_brightness = 0
|
||||
self._attr_is_on = False
|
||||
|
||||
@retry
|
||||
def set_state(self, brightness: int) -> None:
|
||||
"""Set the state of this lamp to the provided brightness."""
|
||||
self._switch.set_brightness(int(brightness / 2.55))
|
||||
self._attr_brightness = brightness
|
||||
|
||||
@retry
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the specified or all lights on."""
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS)
|
||||
self._switch.on()
|
||||
self._attr_is_on = True
|
||||
|
||||
if brightness is not None:
|
||||
self.set_state(brightness)
|
||||
|
||||
@retry
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the specified or all lights off."""
|
||||
self._switch.off()
|
||||
self._attr_is_on = False
|
||||
|
||||
@retry
|
||||
def update(self) -> None:
|
||||
"""Synchronise internal state with the actual light state."""
|
||||
self._attr_brightness = self._switch.get_brightness() * 2.55
|
||||
self._attr_is_on = self._switch.get_on()
|
||||
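The factor 2.55 in DecoraLight converts between Home Assistant's 0-255 brightness scale and the device's 0-100 percentage; a quick check (values invented) shows the round trip is slightly lossy because of the int() truncation:

# Sanity check of the brightness conversions used in DecoraLight above.
ha_brightness = 200
device_percent = int(ha_brightness / 2.55)  # -> 78 (device side, 0-100)
back_to_ha = device_percent * 2.55          # -> 198.9, not exactly 200
assert device_percent == 78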
10 homeassistant/components/decora/manifest.json Normal file
@@ -0,0 +1,10 @@
{
  "domain": "decora",
  "name": "Leviton Decora",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/decora",
  "iot_class": "local_polling",
  "loggers": ["bluepy", "decora"],
  "quality_scale": "legacy",
  "requirements": ["bluepy==1.3.0", "decora==0.6"]
}

3 homeassistant/components/dlib_face_detect/__init__.py Normal file
@@ -0,0 +1,3 @@
"""The dlib_face_detect component."""

DOMAIN = "dlib_face_detect"
@@ -0,0 +1,82 @@
"""Component that will help set the Dlib face detect processing."""

from __future__ import annotations

import io

import face_recognition

from homeassistant.components.image_processing import (
    PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
    ImageProcessingFaceEntity,
)
from homeassistant.const import ATT_LOCATION, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
from homeassistant.core import (
    DOMAIN as HOMEASSISTANT_DOMAIN,
    HomeAssistant,
    split_entity_id,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN

PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Dlib Face detection platform."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Dlib Face Detect",
        },
    )
    source: list[dict[str, str]] = config[CONF_SOURCE]
    add_entities(
        DlibFaceDetectEntity(camera[CONF_ENTITY_ID], camera.get(CONF_NAME))
        for camera in source
    )


class DlibFaceDetectEntity(ImageProcessingFaceEntity):
    """Dlib Face API entity for identify."""

    def __init__(self, camera_entity: str, name: str | None) -> None:
        """Initialize Dlib face entity."""
        super().__init__()

        self._attr_camera_entity = camera_entity

        if name:
            self._attr_name = name
        else:
            self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"

    def process_image(self, image: bytes) -> None:
        """Process image."""

        fak_file = io.BytesIO(image)
        fak_file.name = "snapshot.jpg"
        fak_file.seek(0)

        image = face_recognition.load_image_file(fak_file)
        face_locations = face_recognition.face_locations(image)

        face_locations = [{ATTR_LOCATION: location} for location in face_locations]

        self.process_faces(face_locations, len(face_locations))
10 homeassistant/components/dlib_face_detect/manifest.json Normal file
@@ -0,0 +1,10 @@
{
  "domain": "dlib_face_detect",
  "name": "Dlib Face Detect",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_detect",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}

4 homeassistant/components/dlib_face_identify/__init__.py Normal file
@@ -0,0 +1,4 @@
"""The dlib_face_identify component."""

CONF_FACES = "faces"
DOMAIN = "dlib_face_identify"

127 homeassistant/components/dlib_face_identify/image_processing.py Normal file
@@ -0,0 +1,127 @@
|
||||
"""Component that will help set the Dlib face detect processing."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import logging
|
||||
|
||||
import face_recognition
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.image_processing import (
|
||||
CONF_CONFIDENCE,
|
||||
PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
|
||||
FaceInformation,
|
||||
ImageProcessingFaceEntity,
|
||||
)
|
||||
from homeassistant.const import ATTR_NAME, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
|
||||
from homeassistant.core import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
HomeAssistant,
|
||||
split_entity_id,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_FACES, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FACES): {cv.string: cv.isfile},
|
||||
vol.Optional(CONF_CONFIDENCE, default=0.6): vol.Coerce(float),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Dlib Face detection platform."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Dlib Face Identify",
|
||||
},
|
||||
)
|
||||
|
||||
confidence: float = config[CONF_CONFIDENCE]
|
||||
faces: dict[str, str] = config[CONF_FACES]
|
||||
source: list[dict[str, str]] = config[CONF_SOURCE]
|
||||
add_entities(
|
||||
DlibFaceIdentifyEntity(
|
||||
camera[CONF_ENTITY_ID],
|
||||
faces,
|
||||
camera.get(CONF_NAME),
|
||||
confidence,
|
||||
)
|
||||
for camera in source
|
||||
)
|
||||
|
||||
|
||||
class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
|
||||
"""Dlib Face API entity for identify."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
camera_entity: str,
|
||||
faces: dict[str, str],
|
||||
name: str | None,
|
||||
tolerance: float,
|
||||
) -> None:
|
||||
"""Initialize Dlib face identify entry."""
|
||||
|
||||
super().__init__()
|
||||
|
||||
self._attr_camera_entity = camera_entity
|
||||
|
||||
if name:
|
||||
self._attr_name = name
|
||||
else:
|
||||
self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"
|
||||
|
||||
self._faces = {}
|
||||
for face_name, face_file in faces.items():
|
||||
try:
|
||||
image = face_recognition.load_image_file(face_file)
|
||||
self._faces[face_name] = face_recognition.face_encodings(image)[0]
|
||||
except IndexError as err:
|
||||
_LOGGER.error("Failed to parse %s. Error: %s", face_file, err)
|
||||
|
||||
self._tolerance = tolerance
|
||||
|
||||
def process_image(self, image: bytes) -> None:
|
||||
"""Process image."""
|
||||
|
||||
fak_file = io.BytesIO(image)
|
||||
fak_file.name = "snapshot.jpg"
|
||||
fak_file.seek(0)
|
||||
|
||||
image = face_recognition.load_image_file(fak_file)
|
||||
unknowns = face_recognition.face_encodings(image)
|
||||
|
||||
found: list[FaceInformation] = []
|
||||
for unknown_face in unknowns:
|
||||
for name, face in self._faces.items():
|
||||
result = face_recognition.compare_faces(
|
||||
[face], unknown_face, tolerance=self._tolerance
|
||||
)
|
||||
if result[0]:
|
||||
found.append({ATTR_NAME: name})
|
||||
|
||||
self.process_faces(found, len(unknowns))
|
||||
10 homeassistant/components/dlib_face_identify/manifest.json Normal file
@@ -0,0 +1,10 @@
{
  "domain": "dlib_face_identify",
  "name": "Dlib Face Identify",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_identify",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}
@@ -81,9 +81,6 @@
       "active_map": {
         "default": "mdi:floor-plan"
       },
-      "auto_empty": {
-        "default": "mdi:delete-empty"
-      },
       "water_amount": {
         "default": "mdi:water"
       },
@@ -163,9 +160,6 @@
       "advanced_mode": {
         "default": "mdi:tune"
       },
-      "border_spin": {
-        "default": "mdi:rotate-right"
-      },
       "border_switch": {
         "default": "mdi:land-fields"
       },
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
 }
@@ -5,9 +5,8 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any

 from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
-from deebot_client.command import CommandWithMessageHandling
 from deebot_client.device import Device
-from deebot_client.events import WorkModeEvent, auto_empty
+from deebot_client.events import WorkModeEvent
 from deebot_client.events.base import Event
 from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
 from deebot_client.events.water_info import WaterAmountEvent
@@ -35,9 +34,6 @@ class EcovacsSelectEntityDescription[EventT: Event](

     current_option_fn: Callable[[EventT], str | None]
     options_fn: Callable[[CapabilitySetTypes], list[str]]
-    set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
-        lambda cap, option: cap.set(option)
-    )


 ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
@@ -62,14 +58,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.CONFIG,
     ),
-    EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
-        capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
-        current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
-        options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
-        set_option_fn=lambda cap, option: cap.set(None, option),
-        key="auto_empty",
-        translation_key="auto_empty",
-    ),
 )
@@ -118,17 +106,14 @@ class EcovacsSelectEntity[EventT: Event](
         await super().async_added_to_hass()

         async def on_event(event: EventT) -> None:
-            if (option := self.entity_description.current_option_fn(event)) is not None:
-                self._attr_current_option = option
-                self.async_write_ha_state()
+            self._attr_current_option = self.entity_description.current_option_fn(event)
+            self.async_write_ha_state()

         self._subscribe(self._capability.event, on_event)

     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
-        await self._device.execute_command(
-            self.entity_description.set_option_fn(self._capability, option)
-        )
+        await self._device.execute_command(self._capability.set(option))


 class EcovacsActiveMapSelectEntity(
@@ -129,16 +129,6 @@
       "active_map": {
         "name": "Active map"
       },
-      "auto_empty": {
-        "name": "Auto-empty frequency",
-        "state": {
-          "auto": "Auto",
-          "min_10": "10 minutes",
-          "min_15": "15 minutes",
-          "min_25": "25 minutes",
-          "smart": "Smart"
-        }
-      },
       "water_amount": {
         "name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
         "state": {
@@ -241,9 +231,6 @@
       "advanced_mode": {
         "name": "Advanced mode"
       },
-      "border_spin": {
-        "name": "Border spin"
-      },
       "border_switch": {
         "name": "Border switch"
       },
@@ -99,13 +99,6 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSwitchEntityDescription, ...] = (
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.CONFIG,
     ),
-    EcovacsSwitchEntityDescription(
-        capability_fn=lambda c: c.settings.border_spin,
-        key="border_spin",
-        translation_key="border_spin",
-        entity_registry_enabled_default=False,
-        entity_category=EntityCategory.CONFIG,
-    ),
 )
@@ -0,0 +1,6 @@
"""The eddystone_temperature component."""

DOMAIN = "eddystone_temperature"
CONF_BEACONS = "beacons"
CONF_INSTANCE = "instance"
CONF_NAMESPACE = "namespace"

10 homeassistant/components/eddystone_temperature/manifest.json Normal file
@@ -0,0 +1,10 @@
{
  "domain": "eddystone_temperature",
  "name": "Eddystone",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/eddystone_temperature",
  "iot_class": "local_polling",
  "loggers": ["beacontools"],
  "quality_scale": "legacy",
  "requirements": ["beacontools[scan]==2.1.0"]
}

211 homeassistant/components/eddystone_temperature/sensor.py Normal file
@@ -0,0 +1,211 @@
"""Read temperature information from Eddystone beacons.
|
||||
|
||||
Your beacons must be configured to transmit UID (for identification) and TLM
|
||||
(for temperature) frames.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from beacontools import BeaconScanner, EddystoneFilter, EddystoneTLMFrame
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
STATE_UNKNOWN,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, Event, HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_BEACONS, CONF_INSTANCE, CONF_NAMESPACE, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_BT_DEVICE_ID = "bt_device_id"
|
||||
|
||||
|
||||
BEACON_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_NAMESPACE): cv.string,
|
||||
vol.Required(CONF_INSTANCE): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_BT_DEVICE_ID, default=0): cv.positive_int,
|
||||
vol.Required(CONF_BEACONS): vol.Schema({cv.string: BEACON_SCHEMA}),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Validate configuration, create devices and start monitoring thread."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Eddystone",
|
||||
},
|
||||
)
|
||||
|
||||
bt_device_id: int = config[CONF_BT_DEVICE_ID]
|
||||
|
||||
beacons: dict[str, dict[str, str]] = config[CONF_BEACONS]
|
||||
devices: list[EddystoneTemp] = []
|
||||
|
||||
for dev_name, properties in beacons.items():
|
||||
namespace = get_from_conf(properties, CONF_NAMESPACE, 20)
|
||||
instance = get_from_conf(properties, CONF_INSTANCE, 12)
|
||||
name = properties.get(CONF_NAME, dev_name)
|
||||
|
||||
if instance is None or namespace is None:
|
||||
_LOGGER.error("Skipping %s", dev_name)
|
||||
continue
|
||||
|
||||
devices.append(EddystoneTemp(name, namespace, instance))
|
||||
|
||||
if devices:
|
||||
mon = Monitor(hass, devices, bt_device_id)
|
||||
|
||||
def monitor_stop(event: Event) -> None:
|
||||
"""Stop the monitor thread."""
|
||||
_LOGGER.debug("Stopping scanner for Eddystone beacons")
|
||||
mon.stop()
|
||||
|
||||
def monitor_start(event: Event) -> None:
|
||||
"""Start the monitor thread."""
|
||||
_LOGGER.debug("Starting scanner for Eddystone beacons")
|
||||
mon.start()
|
||||
|
||||
add_entities(devices)
|
||||
mon.start()
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
|
||||
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start)
|
||||
else:
|
||||
_LOGGER.warning("No devices were added")
|
||||
|
||||
|
||||
def get_from_conf(config: dict[str, str], config_key: str, length: int) -> str | None:
|
||||
"""Retrieve value from config and validate length."""
|
||||
string = config[config_key]
|
||||
if len(string) != length:
|
||||
_LOGGER.error(
|
||||
(
|
||||
"Error in configuration parameter %s: Must be exactly %d "
|
||||
"bytes. Device will not be added"
|
||||
),
|
||||
config_key,
|
||||
length / 2,
|
||||
)
|
||||
return None
|
||||
return string
|
||||
|
||||
|
||||
class EddystoneTemp(SensorEntity):
|
||||
"""Representation of a temperature sensor."""
|
||||
|
||||
_attr_device_class = SensorDeviceClass.TEMPERATURE
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, name: str, namespace: str, instance: str) -> None:
|
||||
"""Initialize a sensor."""
|
||||
self._attr_name = name
|
||||
self.namespace = namespace
|
||||
self.instance = instance
|
||||
self.bt_addr = None
|
||||
self.temperature = STATE_UNKNOWN
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the device."""
|
||||
return self.temperature
|
||||
|
||||
|
||||
class Monitor:
|
||||
"""Continuously scan for BLE advertisements."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, devices: list[EddystoneTemp], bt_device_id: int
|
||||
) -> None:
|
||||
"""Construct interface object."""
|
||||
self.hass = hass
|
||||
|
||||
# List of beacons to monitor
|
||||
self.devices = devices
|
||||
# Number of the bt device (hciX)
|
||||
self.bt_device_id = bt_device_id
|
||||
|
||||
def callback(bt_addr, _, packet, additional_info):
|
||||
"""Handle new packets."""
|
||||
self.process_packet(
|
||||
additional_info["namespace"],
|
||||
additional_info["instance"],
|
||||
packet.temperature,
|
||||
)
|
||||
|
||||
device_filters = [EddystoneFilter(d.namespace, d.instance) for d in devices]
|
||||
|
||||
self.scanner = BeaconScanner(
|
||||
callback, bt_device_id, device_filters, EddystoneTLMFrame
|
||||
)
|
||||
self.scanning = False
|
||||
|
||||
def start(self) -> None:
|
||||
"""Continuously scan for BLE advertisements."""
|
||||
if not self.scanning:
|
||||
self.scanner.start()
|
||||
self.scanning = True
|
||||
else:
|
||||
_LOGGER.debug("start() called, but scanner is already running")
|
||||
|
||||
def process_packet(self, namespace, instance, temperature) -> None:
|
||||
"""Assign temperature to device."""
|
||||
_LOGGER.debug(
|
||||
"Received temperature for <%s,%s>: %d", namespace, instance, temperature
|
||||
)
|
||||
|
||||
for dev in self.devices:
|
||||
if (
|
||||
dev.namespace == namespace
|
||||
and dev.instance == instance
|
||||
and dev.temperature != temperature
|
||||
):
|
||||
dev.temperature = temperature
|
||||
dev.schedule_update_ha_state()
|
||||
|
||||
def stop(self) -> None:
|
||||
"""Signal runner to stop and join thread."""
|
||||
if self.scanning:
|
||||
_LOGGER.debug("Stopping")
|
||||
self.scanner.stop()
|
||||
_LOGGER.debug("Stopped")
|
||||
self.scanning = False
|
||||
else:
|
||||
_LOGGER.debug("stop() called but scanner was not running")
|
||||
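For context on the length checks in get_from_conf above: Eddystone UID frames carry a 10-byte namespace and a 6-byte instance, configured here as hex strings of 20 and 12 characters, which is why the error message reports length / 2 bytes. A small illustration (identifiers invented):

# Eddystone UID identifiers are hex strings; two hex chars per byte.
namespace = "112233445566778899aa"  # 20 hex chars == 10 bytes (invented)
instance = "000000000001"           # 12 hex chars == 6 bytes (invented)
assert len(bytes.fromhex(namespace)) == 10
assert len(bytes.fromhex(instance)) == 6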
@@ -8,7 +8,7 @@
   "iot_class": "local_polling",
   "loggers": ["eheimdigital"],
   "quality_scale": "platinum",
-  "requirements": ["eheimdigital==1.4.0"],
+  "requirements": ["eheimdigital==1.3.0"],
   "zeroconf": [
     { "name": "eheimdigital._http._tcp.local.", "type": "_http._tcp.local." }
   ]
@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
                 return await self.async_step_discovered_connection()
             return await self.async_step_manual_connection()

-        current_unique_ids = self._async_current_ids(include_ignore=False)
+        current_unique_ids = self._async_current_ids()
         current_hosts = {
             hostname_from_url(entry.data[CONF_HOST])
             for entry in self._async_current_entries(include_ignore=False)
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==42.5.0",
+    "aioesphomeapi==42.4.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
@@ -77,7 +77,7 @@ class EufyLifeConfigFlow(ConfigFlow, domain=DOMAIN):
                 data={CONF_MODEL: model},
             )

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if (
@@ -40,9 +40,7 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
         client = Firefly(
             api_url=data[CONF_URL],
             api_key=data[CONF_API_KEY],
-            session=async_get_clientsession(
-                hass=hass, verify_ssl=data[CONF_VERIFY_SSL]
-            ),
+            session=async_get_clientsession(hass),
         )
         await client.get_about()
     except FireflyAuthenticationError:
@@ -129,51 +127,6 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
             errors=errors,
         )

-    async def async_step_reconfigure(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle reconfiguration of the integration."""
-        errors: dict[str, str] = {}
-        reconf_entry = self._get_reconfigure_entry()
-
-        if user_input:
-            try:
-                await _validate_input(
-                    self.hass,
-                    data={
-                        **reconf_entry.data,
-                        **user_input,
-                    },
-                )
-            except CannotConnect:
-                errors["base"] = "cannot_connect"
-            except InvalidAuth:
-                errors["base"] = "invalid_auth"
-            except FireflyClientTimeout:
-                errors["base"] = "timeout_connect"
-            except Exception:
-                _LOGGER.exception("Unexpected exception")
-                errors["base"] = "unknown"
-            else:
-                self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
-                return self.async_update_reload_and_abort(
-                    reconf_entry,
-                    data_updates={
-                        CONF_URL: user_input[CONF_URL],
-                        CONF_API_KEY: user_input[CONF_API_KEY],
-                        CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
-                    },
-                )
-
-        return self.async_show_form(
-            step_id="reconfigure",
-            data_schema=self.add_suggested_values_to_schema(
-                data_schema=STEP_USER_DATA_SCHEMA,
-                suggested_values=user_input or reconf_entry.data.copy(),
-            ),
-            errors=errors,
-        )
-

 class CannotConnect(HomeAssistantError):
     """Error to indicate we cannot connect."""
@@ -2,8 +2,7 @@
   "config": {
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -21,20 +20,6 @@
       },
       "description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
     },
-    "reconfigure": {
-      "data": {
-        "api_key": "[%key:common::config_flow::data::api_key%]",
-        "url": "[%key:common::config_flow::data::url%]",
-        "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
-      },
-      "data_description": {
-        "api_key": "[%key:component::firefly_iii::config::step::user::data_description::api_key%]",
-        "url": "[%key:common::config_flow::data::url%]",
-        "verify_ssl": "[%key:component::firefly_iii::config::step::user::data_description::verify_ssl%]"
-      },
-      "description": "Use the following form to reconfigure your Firefly III instance.",
-      "title": "Reconfigure Firefly III Integration"
-    },
     "user": {
       "data": {
         "api_key": "[%key:common::config_flow::data::api_key%]",
152 homeassistant/components/flick_electric/__init__.py Normal file
@@ -0,0 +1,152 @@
"""The Flick Electric integration."""
|
||||
|
||||
from datetime import datetime as dt
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
from pyflick import FlickAPI
|
||||
from pyflick.authentication import SimpleFlickAuth
|
||||
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_ACCESS_TOKEN,
|
||||
CONF_CLIENT_ID,
|
||||
CONF_CLIENT_SECRET,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
|
||||
from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
|
||||
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_ID_TOKEN = "id_token"
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
|
||||
"""Set up Flick Electric from a config entry."""
|
||||
auth = HassFlickAuth(hass, entry)
|
||||
|
||||
coordinator = FlickElectricDataCoordinator(hass, entry, FlickAPI(auth))
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_migrate_entry(
|
||||
hass: HomeAssistant, config_entry: FlickConfigEntry
|
||||
) -> bool:
|
||||
"""Migrate old entry."""
|
||||
_LOGGER.debug(
|
||||
"Migrating configuration from version %s.%s",
|
||||
config_entry.version,
|
||||
config_entry.minor_version,
|
||||
)
|
||||
|
||||
if config_entry.version > 2:
|
||||
return False
|
||||
|
||||
if config_entry.version == 1:
|
||||
api = FlickAPI(HassFlickAuth(hass, config_entry))
|
||||
|
||||
accounts = await api.getCustomerAccounts()
|
||||
active_accounts = [
|
||||
account for account in accounts if account["status"] == "active"
|
||||
]
|
||||
|
||||
# A single active account can be auto-migrated
|
||||
if (len(active_accounts)) == 1:
|
||||
account = active_accounts[0]
|
||||
|
||||
new_data = {**config_entry.data}
|
||||
new_data[CONF_ACCOUNT_ID] = account["id"]
|
||||
new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
|
||||
hass.config_entries.async_update_entry(
|
||||
config_entry,
|
||||
title=account["address"],
|
||||
unique_id=account["id"],
|
||||
data=new_data,
|
||||
version=2,
|
||||
)
|
||||
return True
|
||||
|
||||
config_entry.async_start_reauth(hass, data={**config_entry.data})
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class HassFlickAuth(SimpleFlickAuth):
|
||||
"""Implementation of AbstractFlickAuth based on a Home Assistant entity config."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
|
||||
"""Flick authentication based on a Home Assistant entity config."""
|
||||
super().__init__(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
|
||||
client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
|
||||
websession=aiohttp_client.async_get_clientsession(hass),
|
||||
)
|
||||
self._entry = entry
|
||||
self._hass = hass
|
||||
|
||||
async def _get_entry_token(self) -> dict[str, Any]:
|
||||
# No token saved, generate one
|
||||
if (
|
||||
CONF_TOKEN_EXPIRY not in self._entry.data
|
||||
or CONF_ACCESS_TOKEN not in self._entry.data
|
||||
):
|
||||
await self._update_token()
|
||||
|
||||
# Token is expired, generate a new one
|
||||
if self._entry.data[CONF_TOKEN_EXPIRY] <= dt.now().timestamp():
|
||||
await self._update_token()
|
||||
|
||||
return self._entry.data[CONF_ACCESS_TOKEN]
|
||||
|
||||
async def _update_token(self):
|
||||
_LOGGER.debug("Fetching new access token")
|
||||
|
||||
token = await super().get_new_token(
|
||||
self._username, self._password, self._client_id, self._client_secret
|
||||
)
|
||||
|
||||
_LOGGER.debug("New token: %s", token)
|
||||
|
||||
# Flick will send the same token, but expiry is relative - so grab it from the token
|
||||
token_decoded = jwt.decode(
|
||||
token[CONF_ID_TOKEN], options={"verify_signature": False}
|
||||
)
|
||||
|
||||
self._hass.config_entries.async_update_entry(
|
||||
self._entry,
|
||||
data={
|
||||
**self._entry.data,
|
||||
CONF_ACCESS_TOKEN: token,
|
||||
CONF_TOKEN_EXPIRY: token_decoded["exp"],
|
||||
},
|
||||
)
|
||||
|
||||
async def async_get_access_token(self):
|
||||
"""Get Access Token from HASS Storage."""
|
||||
token = await self._get_entry_token()
|
||||
|
||||
return token[CONF_ID_TOKEN]
|
||||
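The token refresh above keys entirely off the `exp` claim of the ID token, decoded without signature verification (the comment notes Flick's expiry is relative, so the claim is the only reliable source). A minimal sketch of that expiry check in isolation, assuming PyJWT's standard `decode` API; the helper name `token_is_expired` is made up for illustration:

from datetime import datetime, timezone

import jwt  # PyJWT


def token_is_expired(id_token: str) -> bool:
    """Return True if the JWT's exp claim is in the past (hypothetical helper)."""
    # Signature verification is skipped: only the expiry claim is needed here,
    # and the token is validated server-side on every API call anyway.
    claims = jwt.decode(id_token, options={"verify_signature": False})
    return claims["exp"] <= datetime.now(timezone.utc).timestamp()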
210  homeassistant/components/flick_electric/config_flow.py  Normal file
@@ -0,0 +1,210 @@
"""Config Flow for Flick Electric integration."""

import asyncio
from collections.abc import Mapping
import logging
from typing import Any

from aiohttp import ClientResponseError
from pyflick import FlickAPI
from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
from pyflick.types import APIException, AuthException, CustomerAccount
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_CLIENT_ID,
    CONF_CLIENT_SECRET,
    CONF_PASSWORD,
    CONF_USERNAME,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN

_LOGGER = logging.getLogger(__name__)

LOGIN_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
        vol.Optional(CONF_CLIENT_ID): str,
        vol.Optional(CONF_CLIENT_SECRET): str,
    }
)


class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
    """Flick config flow."""

    VERSION = 2
    auth: AbstractFlickAuth
    accounts: list[CustomerAccount]
    data: dict[str, Any]

    async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
        self.auth = SimpleFlickAuth(
            username=user_input[CONF_USERNAME],
            password=user_input[CONF_PASSWORD],
            websession=aiohttp_client.async_get_clientsession(self.hass),
            client_id=user_input.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
            client_secret=user_input.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
        )

        try:
            async with asyncio.timeout(60):
                token = await self.auth.async_get_access_token()
        except (TimeoutError, ClientResponseError) as err:
            raise CannotConnect from err
        except AuthException as err:
            raise InvalidAuth from err

        return token is not None

    async def async_step_select_account(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Ask user to select account."""

        errors = {}
        if user_input is not None and CONF_ACCOUNT_ID in user_input:
            self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
                user_input[CONF_ACCOUNT_ID]
            )
            try:
                # Ensure supply node is active
                await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
            except (APIException, ClientResponseError):
                errors["base"] = "cannot_connect"
            except AuthException:
                # We should never get here as we have a valid token
                return self.async_abort(reason="no_permissions")
            else:
                # Supply node is active
                return await self._async_create_entry()

        try:
            self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
        except (APIException, ClientResponseError):
            errors["base"] = "cannot_connect"

        active_accounts = [a for a in self.accounts if a["status"] == "active"]

        if len(active_accounts) == 0:
            return self.async_abort(reason="no_accounts")

        if len(active_accounts) == 1:
            self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
                active_accounts[0]["id"]
            )

            return await self._async_create_entry()

        return self.async_show_form(
            step_id="select_account",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ACCOUNT_ID): SelectSelector(
                        SelectSelectorConfig(
                            options=[
                                SelectOptionDict(
                                    value=account["id"], label=account["address"]
                                )
                                for account in active_accounts
                            ],
                            mode=SelectSelectorMode.LIST,
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_user(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle gathering login info."""
        errors = {}
        if user_input is not None:
            try:
                await self._validate_auth(user_input)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                self.data = dict(user_input)
                return await self.async_step_select_account(user_input)

        return self.async_show_form(
            step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
        )

    async def async_step_reauth(
        self, user_input: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle re-authentication."""

        self.data = {**user_input}

        return await self.async_step_user(user_input)

    async def _async_create_entry(self) -> ConfigFlowResult:
        """Create an entry for the flow."""

        await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])

        account = self._get_account(self.data[CONF_ACCOUNT_ID])

        if self.source == SOURCE_REAUTH:
            # Migration completed
            if self._get_reauth_entry().version == 1:
                self.hass.config_entries.async_update_entry(
                    self._get_reauth_entry(),
                    unique_id=self.unique_id,
                    data=self.data,
                    version=self.VERSION,
                )

            return self.async_update_reload_and_abort(
                self._get_reauth_entry(),
                unique_id=self.unique_id,
                title=account["address"],
                data=self.data,
            )

        self._abort_if_unique_id_configured()

        return self.async_create_entry(
            title=account["address"],
            data=self.data,
        )

    def _get_account(self, account_id: str) -> CustomerAccount:
        """Get the account for the account ID."""
        return next(a for a in self.accounts if a["id"] == account_id)

    def _get_supply_node_ref(self, account_id: str) -> str:
        """Get the supply node ref for the account."""
        return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""


class InvalidAuth(HomeAssistantError):
    """Error to indicate there is invalid auth."""
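The `_validate_auth` pattern above, bounding a network call with `asyncio.timeout` and translating library errors into flow-specific exceptions, is a reusable shape. A standalone sketch under those assumptions, with `fetch_token` standing in for any awaitable login call (nothing here is part of the integration itself):

import asyncio
from collections.abc import Awaitable, Callable


class CannotConnect(Exception):
    """Raised when the service is unreachable or too slow."""


async def validate_login(fetch_token: Callable[[], Awaitable[str | None]]) -> bool:
    """Run a login coroutine with a hard 60 s budget (illustrative only)."""
    try:
        async with asyncio.timeout(60):
            token = await fetch_token()
    except TimeoutError as err:
        # Surface timeouts as a connection problem the form can display
        raise CannotConnect from err
    return token is not None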
12  homeassistant/components/flick_electric/const.py  Normal file
@@ -0,0 +1,12 @@
"""Constants for the Flick Electric integration."""

DOMAIN = "flick_electric"

CONF_TOKEN_EXPIRY = "expires"
CONF_ACCOUNT_ID = "account_id"
CONF_SUPPLY_NODE_REF = "supply_node_ref"

ATTR_START_AT = "start_at"
ATTR_END_AT = "end_at"

ATTR_COMPONENTS = ["retailer", "ea", "metering", "generation", "admin", "network"]
55  homeassistant/components/flick_electric/coordinator.py  Normal file
@@ -0,0 +1,55 @@
"""Data Coordinator for Flick Electric."""

import asyncio
from datetime import timedelta
import logging

import aiohttp
from pyflick import FlickAPI, FlickPrice
from pyflick.types import APIException, AuthException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_SUPPLY_NODE_REF

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(minutes=5)

type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]


class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
    """Coordinator for flick power price."""

    config_entry: FlickConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: FlickConfigEntry,
        api: FlickAPI,
    ) -> None:
        """Initialize FlickElectricDataCoordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name="Flick Electric",
            update_interval=SCAN_INTERVAL,
        )
        self.supply_node_ref = config_entry.data[CONF_SUPPLY_NODE_REF]
        self._api = api

    async def _async_update_data(self) -> FlickPrice:
        """Fetch pricing data from Flick Electric."""
        try:
            async with asyncio.timeout(60):
                return await self._api.getPricing(self.supply_node_ref)
        except AuthException as err:
            raise ConfigEntryAuthFailed from err
        except (APIException, aiohttp.ClientResponseError) as err:
            raise UpdateFailed from err
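Consumers of this coordinator never call the API themselves; they subclass CoordinatorEntity and read `coordinator.data`, which the base class refreshes for all consumers at most once per SCAN_INTERVAL. A minimal sketch of such a consumer, with the coordinator class imported from the module above (the entity name is made up):

from homeassistant.components.flick_electric.coordinator import (
    FlickElectricDataCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExamplePriceReader(CoordinatorEntity[FlickElectricDataCoordinator]):
    """Hypothetical consumer; the real one is FlickPricingSensor below."""

    @property
    def current_cost(self):
        # coordinator.data is the FlickPrice from the last successful refresh
        return self.coordinator.data.cost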
11  homeassistant/components/flick_electric/manifest.json  Normal file
@@ -0,0 +1,11 @@
{
  "domain": "flick_electric",
  "name": "Flick Electric",
  "codeowners": ["@ZephireNZ"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/flick_electric",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pyflick"],
  "requirements": ["PyFlick==1.1.3"]
}
72  homeassistant/components/flick_electric/sensor.py  Normal file
@@ -0,0 +1,72 @@
"""Support for Flick Electric Pricing data."""

from datetime import timedelta
from decimal import Decimal
import logging
from typing import Any

from homeassistant.components.sensor import SensorEntity
from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=5)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: FlickConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Flick Sensor Setup."""
    coordinator = entry.runtime_data

    async_add_entities([FlickPricingSensor(coordinator)])


class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
    """Entity object for Flick Electric sensor."""

    _attr_attribution = "Data provided by Flick Electric"
    _attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
    _attr_has_entity_name = True
    _attr_translation_key = "power_price"

    def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
        """Entity object for Flick Electric sensor."""
        super().__init__(coordinator)

        self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"

    @property
    def native_value(self) -> Decimal:
        """Return the state of the sensor."""
        # The API should return a unit price with quantity of 1.0 when no start/end time is provided
        if self.coordinator.data.quantity != 1:
            _LOGGER.warning(
                "Unexpected quantity for unit price: %s", self.coordinator.data
            )
        return self.coordinator.data.cost * 100

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes."""
        components: dict[str, float] = {}

        for component in self.coordinator.data.components:
            if component.charge_setter not in ATTR_COMPONENTS:
                _LOGGER.warning("Found unknown component: %s", component.charge_setter)
                continue

            components[component.charge_setter] = float(component.value * 100)

        return {
            ATTR_START_AT: self.coordinator.data.start_at,
            ATTR_END_AT: self.coordinator.data.end_at,
            **components,
        }
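`native_value` multiplies `cost` by 100, and the same scaling is applied to each price component, which only makes sense if the API quotes dollars per kilowatt-hour while the entity's unit is cents per kilowatt-hour (that assumption is not stated in the code). A quick check of the arithmetic under that assumption:

from decimal import Decimal

api_cost = Decimal("0.25")     # dollars per kWh, as presumably returned by the API
native_value = api_cost * 100  # scaled to cents per kWh, matching CURRENCY_CENT/kWh
assert native_value == Decimal("25.00")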
39  homeassistant/components/flick_electric/strings.json  Normal file
@@ -0,0 +1,39 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "no_accounts": "No services are active on this Flick account",
      "no_permissions": "Cannot get pricing for this account. Please check user permissions.",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "select_account": {
        "data": {
          "account_id": "Account"
        },
        "title": "Select account"
      },
      "user": {
        "data": {
          "client_id": "Client ID (optional)",
          "client_secret": "Client Secret (optional)",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        },
        "title": "Flick Login Credentials"
      }
    }
  },
  "entity": {
    "sensor": {
      "power_price": {
        "name": "Flick power price"
      }
    }
  }
}
@@ -21,9 +21,6 @@ from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity
 from .model import FritzEntityDescriptionMixinBase

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 @dataclass(frozen=True, kw_only=True)
 class FritzBinarySensorEntityDescription(

@@ -11,9 +11,6 @@ from .const import DOMAIN
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxEntity

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -23,9 +23,6 @@ from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
 from .entity import FritzBoxDeviceEntity
 from .sensor import value_scheduled_preset

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF]
 PRESET_HOLIDAY = "holiday"
 PRESET_SUMMER = "summer"

@@ -16,9 +16,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -18,9 +18,6 @@ from .const import COLOR_MODE, LOGGER
 from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
 from .entity import FritzBoxDeviceEntity

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -34,9 +34,6 @@ from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity
 from .model import FritzEntityDescriptionMixinBase

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 @dataclass(frozen=True, kw_only=True)
 class FritzSensorEntityDescription(

@@ -13,9 +13,6 @@ from .const import DOMAIN
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity

-# Coordinator handles data updates, so we can allow unlimited parallel updates
-PARALLEL_UPDATES = 0

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20251029.1"]
+  "requirements": ["home-assistant-frontend==20251029.0"]
 }

@@ -43,9 +43,6 @@ from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

-# Coordinator is used to centralize the data updates
-PARALLEL_UPDATES = 0

 @dataclass(frozen=True, kw_only=True)
 class GiosSensorEntityDescription(SensorEntityDescription):

@@ -14,10 +14,6 @@
       "name": "[%key:common::config_flow::data::name%]",
       "station_id": "Measuring station"
     },
-    "data_description": {
-      "name": "Config entry name, by default, this is the name of your Home Assistant instance.",
-      "station_id": "The name of the measuring station where the environmental data is collected."
-    },
     "title": "GIOŚ (Polish Chief Inspectorate Of Environmental Protection)"
   }
 }
3  homeassistant/components/gstreamer/__init__.py  Normal file
@@ -0,0 +1,3 @@
"""The gstreamer component."""

DOMAIN = "gstreamer"

10  homeassistant/components/gstreamer/manifest.json  Normal file
@@ -0,0 +1,10 @@
{
  "domain": "gstreamer",
  "name": "GStreamer",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/gstreamer",
  "iot_class": "local_push",
  "loggers": ["gsp"],
  "quality_scale": "legacy",
  "requirements": ["gstreamer-player==1.1.2"]
}
195  homeassistant/components/gstreamer/media_player.py  Normal file
@@ -0,0 +1,195 @@
"""Play media via gstreamer."""

from __future__ import annotations

import logging
from typing import Any

from gsp import STATE_IDLE, STATE_PAUSED, STATE_PLAYING, GstreamerPlayer
import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.components.media_player import (
    PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
    BrowseMedia,
    MediaPlayerEntity,
    MediaPlayerEntityFeature,
    MediaPlayerState,
    MediaType,
    async_process_play_media_url,
)
from homeassistant.const import CONF_NAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN

_LOGGER = logging.getLogger(__name__)

CONF_PIPELINE = "pipeline"


PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
    {vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_PIPELINE): cv.string}
)

GSP_STATE_MAPPING = {
    STATE_IDLE: MediaPlayerState.IDLE,
    STATE_PAUSED: MediaPlayerState.PAUSED,
    STATE_PLAYING: MediaPlayerState.PLAYING,
}


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Gstreamer platform."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "GStreamer",
        },
    )

    name = config.get(CONF_NAME)
    pipeline = config.get(CONF_PIPELINE)
    player = GstreamerPlayer(pipeline)

    def _shutdown(call):
        """Quit the player on shutdown."""
        player.quit()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
    add_entities([GstreamerDevice(player, name)])


class GstreamerDevice(MediaPlayerEntity):
    """Representation of a Gstreamer device."""

    _attr_media_content_type = MediaType.MUSIC
    _attr_supported_features = (
        MediaPlayerEntityFeature.VOLUME_SET
        | MediaPlayerEntityFeature.PLAY
        | MediaPlayerEntityFeature.PAUSE
        | MediaPlayerEntityFeature.PLAY_MEDIA
        | MediaPlayerEntityFeature.NEXT_TRACK
        | MediaPlayerEntityFeature.BROWSE_MEDIA
    )

    def __init__(self, player: GstreamerPlayer, name: str | None) -> None:
        """Initialize the Gstreamer device."""
        self._player = player
        self._name = name or DOMAIN
        self._attr_state = MediaPlayerState.IDLE
        self._volume = None
        self._duration = None
        self._uri = None
        self._title = None
        self._artist = None
        self._album = None

    def update(self) -> None:
        """Update properties."""
        self._attr_state = GSP_STATE_MAPPING.get(self._player.state)
        self._volume = self._player.volume
        self._duration = self._player.duration
        self._uri = self._player.uri
        self._title = self._player.title
        self._album = self._player.album
        self._artist = self._player.artist

    def set_volume_level(self, volume: float) -> None:
        """Set the volume level."""
        self._player.volume = volume

    async def async_play_media(
        self, media_type: MediaType | str, media_id: str, **kwargs: Any
    ) -> None:
        """Play media."""
        # Handle media_source
        if media_source.is_media_source_id(media_id):
            sourced_media = await media_source.async_resolve_media(
                self.hass, media_id, self.entity_id
            )
            media_id = sourced_media.url

        elif media_type != MediaType.MUSIC:
            _LOGGER.error("Invalid media type")
            return

        media_id = async_process_play_media_url(self.hass, media_id)

        await self.hass.async_add_executor_job(self._player.queue, media_id)

    def media_play(self) -> None:
        """Play."""
        self._player.play()

    def media_pause(self) -> None:
        """Pause."""
        self._player.pause()

    def media_next_track(self) -> None:
        """Next track."""
        self._player.next()

    @property
    def media_content_id(self):
        """Content ID of currently playing media."""
        return self._uri

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def volume_level(self):
        """Return the volume level."""
        return self._volume

    @property
    def media_duration(self):
        """Duration of current playing media in seconds."""
        return self._duration

    @property
    def media_title(self):
        """Media title."""
        return self._title

    @property
    def media_artist(self):
        """Media artist."""
        return self._artist

    @property
    def media_album_name(self):
        """Media album."""
        return self._album

    async def async_browse_media(
        self,
        media_content_type: MediaType | str | None = None,
        media_content_id: str | None = None,
    ) -> BrowseMedia:
        """Implement the websocket media browsing helper."""
        return await media_source.async_browse_media(
            self.hass,
            media_content_id,
            content_filter=lambda item: item.media_content_type.startswith("audio/"),
        )
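`async_play_media` above first resolves a `media-source://` identifier to a plain URL and only then hands the URL to the player; that resolve-then-process sequence is the standard pattern for media players that accept media-source IDs. The same steps in isolation, with `player` and `resolve_and_queue` as illustrative stand-ins rather than anything from this platform:

from homeassistant.components import media_source
from homeassistant.components.media_player import async_process_play_media_url


async def resolve_and_queue(hass, entity_id, player, media_id: str) -> None:
    """Resolve a media-source ID to a playable URL, then queue it (sketch only)."""
    if media_source.is_media_source_id(media_id):
        sourced = await media_source.async_resolve_media(hass, media_id, entity_id)
        media_id = sourced.url
    # Sign/absolutize the URL so the player can fetch it from Home Assistant
    media_id = async_process_play_media_url(hass, media_id)
    # The player call is blocking, so run it in the executor
    await hass.async_add_executor_job(player.queue, media_id)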
@@ -620,11 +620,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

-    # Unload coordinator
-    coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
-    coordinator.unload()
-
-    # Pop coordinator
+    # Pop add-on data
     hass.data.pop(ADDONS_COORDINATOR, None)

     return unload_ok

@@ -563,8 +563,3 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             self.async_set_updated_data(data)
         except SupervisorError as err:
             _LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
-
-    @callback
-    def unload(self) -> None:
-        """Clean up when config entry unloaded."""
-        self.jobs.unload()
@@ -3,7 +3,6 @@
 from collections.abc import Callable
 from dataclasses import dataclass, replace
 from functools import partial
-import logging
 from typing import Any
 from uuid import UUID

@@ -30,8 +29,6 @@ from .const import (
 )
 from .handler import get_supervisor_client

-_LOGGER = logging.getLogger(__name__)

 @dataclass(slots=True, frozen=True)
 class JobSubscription:

@@ -48,7 +45,7 @@ class JobSubscription:
     event_callback: Callable[[Job], Any]
     uuid: str | None = None
     name: str | None = None
-    reference: str | None = None
+    reference: str | None | type[Any] = Any

     def __post_init__(self) -> None:
         """Validate at least one filter option is present."""

@@ -61,7 +58,7 @@ class JobSubscription:
         """Return true if job matches subscription filters."""
         if self.uuid:
             return job.uuid == self.uuid
-        return job.name == self.name and self.reference in (None, job.reference)
+        return job.name == self.name and self.reference in (Any, job.reference)


 class SupervisorJobs:

@@ -73,7 +70,6 @@ class SupervisorJobs:
         self._supervisor_client = get_supervisor_client(hass)
         self._jobs: dict[UUID, Job] = {}
         self._subscriptions: set[JobSubscription] = set()
-        self._dispatcher_disconnect: Callable[[], None] | None = None

     @property
     def current_jobs(self) -> list[Job]:

@@ -83,24 +79,20 @@ class SupervisorJobs:
     def subscribe(self, subscription: JobSubscription) -> CALLBACK_TYPE:
         """Subscribe to updates for job. Return callback is used to unsubscribe.

-        If any jobs match the subscription at the time this is called, runs the
-        callback on them.
+        If any jobs match the subscription at the time this is called, creates
+        tasks to run their callback on it.
         """
         self._subscriptions.add(subscription)

-        # Run the callback on each existing match
-        # We catch all errors to prevent an error in one from stopping the others
-        for match in [job for job in self._jobs.values() if subscription.matches(job)]:
-            try:
-                return subscription.event_callback(match)
-            except Exception as err:  # noqa: BLE001
-                _LOGGER.error(
-                    "Error encountered processing Supervisor Job (%s %s %s) - %s",
-                    match.name,
-                    match.reference,
-                    match.uuid,
-                    err,
-                )
+        # As these are callbacks they are safe to run in the event loop
+        # We wrap these in an asyncio task so subscribing does not wait on the logic
+        if matches := [job for job in self._jobs.values() if subscription.matches(job)]:
+
+            async def event_callback_async(job: Job) -> Any:
+                return subscription.event_callback(job)
+
+            for match in matches:
+                self._hass.async_create_task(event_callback_async(match))

         return partial(self._subscriptions.discard, subscription)

@@ -139,7 +131,7 @@ class SupervisorJobs:

         # If this is the first update register to receive Supervisor events
         if first_update:
-            self._dispatcher_disconnect = async_dispatcher_connect(
+            async_dispatcher_connect(
                 self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_jobs
             )

@@ -166,14 +158,3 @@ class SupervisorJobs:
         for sub in self._subscriptions:
             if sub.matches(job):
                 sub.event_callback(job)
-
-        # If the job is done, pop it from our cache if present after processing is done
-        if job.done and job.uuid in self._jobs:
-            del self._jobs[job.uuid]
-
-    @callback
-    def unload(self) -> None:
-        """Unregister with dispatcher on config entry unload."""
-        if self._dispatcher_disconnect:
-            self._dispatcher_disconnect()
-            self._dispatcher_disconnect = None
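The `reference` change above swaps the wildcard sentinel from `None` to `typing.Any`, which frees up `None` to be matched literally (jobs that carry no reference). The resulting matching semantics, in a self-contained sketch that mirrors the filter logic rather than reusing the real JobSubscription:

from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class Sub:
    """Cut-down stand-in for JobSubscription's reference filter."""

    name: str
    reference: str | None | type[Any] = Any  # Any = match every reference

    def matches(self, job_name: str, job_reference: str | None) -> bool:
        return job_name == self.name and self.reference in (Any, job_reference)


wildcard = Sub("backup")                # matches any reference, including None
exact = Sub("backup", reference="abc")  # matches only reference "abc"
no_ref = Sub("backup", reference=None)  # matches only jobs with no reference

assert wildcard.matches("backup", "xyz") and wildcard.matches("backup", None)
assert exact.matches("backup", "abc") and not exact.matches("backup", "xyz")
assert no_ref.matches("backup", None) and not no_ref.matches("backup", "xyz")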
@@ -72,7 +72,7 @@ class KegtronConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
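This `include_ignore=False` difference repeats across several Bluetooth config flows below. The apparent intent, assuming the usual ConfigFlow semantics, is that ignored config entries should no longer count as "already configured" during discovery, so a device the user once ignored can still be offered for manual setup. Schematically:

# Hedged illustration of the filtering difference, not real flow code.
configured = {"aa:bb:cc:dd:ee:ff"}  # addresses with active config entries
ignored = {"11:22:33:44:55:66"}     # addresses the user chose to ignore

ids_with_ignored = configured | ignored  # _async_current_ids(): ignored hidden too
ids_without_ignored = configured         # include_ignore=False: still offered

address = "11:22:33:44:55:66"
assert address in ids_with_ignored
assert address not in ids_without_ignored  # device shows up for setup again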
87  homeassistant/components/keyboard/__init__.py  Normal file
@@ -0,0 +1,87 @@
"""Support to emulate keyboard presses on host machine."""

from pykeyboard import PyKeyboard
import voluptuous as vol

from homeassistant.const import (
    SERVICE_MEDIA_NEXT_TRACK,
    SERVICE_MEDIA_PLAY_PAUSE,
    SERVICE_MEDIA_PREVIOUS_TRACK,
    SERVICE_VOLUME_DOWN,
    SERVICE_VOLUME_MUTE,
    SERVICE_VOLUME_UP,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType

DOMAIN = "keyboard"

TAP_KEY_SCHEMA = vol.Schema({})

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Listen for keyboard events."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Keyboard",
        },
    )

    keyboard = PyKeyboard()
    keyboard.special_key_assignment()

    hass.services.register(
        DOMAIN,
        SERVICE_VOLUME_UP,
        lambda service: keyboard.tap_key(keyboard.volume_up_key),
        schema=TAP_KEY_SCHEMA,
    )

    hass.services.register(
        DOMAIN,
        SERVICE_VOLUME_DOWN,
        lambda service: keyboard.tap_key(keyboard.volume_down_key),
        schema=TAP_KEY_SCHEMA,
    )

    hass.services.register(
        DOMAIN,
        SERVICE_VOLUME_MUTE,
        lambda service: keyboard.tap_key(keyboard.volume_mute_key),
        schema=TAP_KEY_SCHEMA,
    )

    hass.services.register(
        DOMAIN,
        SERVICE_MEDIA_PLAY_PAUSE,
        lambda service: keyboard.tap_key(keyboard.media_play_pause_key),
        schema=TAP_KEY_SCHEMA,
    )

    hass.services.register(
        DOMAIN,
        SERVICE_MEDIA_NEXT_TRACK,
        lambda service: keyboard.tap_key(keyboard.media_next_track_key),
        schema=TAP_KEY_SCHEMA,
    )

    hass.services.register(
        DOMAIN,
        SERVICE_MEDIA_PREVIOUS_TRACK,
        lambda service: keyboard.tap_key(keyboard.media_prev_track_key),
        schema=TAP_KEY_SCHEMA,
    )
    return True
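Each registered service simply taps one special key on the host, so once the component is set up the services behave like any other. A minimal invocation from an async context such as a test or a custom component, as a sketch of usage rather than anything shipped with the integration:

from homeassistant.core import HomeAssistant


async def tap_volume_up(hass: HomeAssistant) -> None:
    """Fire the keyboard.volume_up service (illustrative usage only)."""
    await hass.services.async_call("keyboard", "volume_up", {}, blocking=True)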
22  homeassistant/components/keyboard/icons.json  Normal file
@@ -0,0 +1,22 @@
{
  "services": {
    "media_next_track": {
      "service": "mdi:skip-next"
    },
    "media_play_pause": {
      "service": "mdi:play-pause"
    },
    "media_prev_track": {
      "service": "mdi:skip-previous"
    },
    "volume_down": {
      "service": "mdi:volume-low"
    },
    "volume_mute": {
      "service": "mdi:volume-off"
    },
    "volume_up": {
      "service": "mdi:volume-high"
    }
  }
}

10  homeassistant/components/keyboard/manifest.json  Normal file
@@ -0,0 +1,10 @@
{
  "domain": "keyboard",
  "name": "Keyboard",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/keyboard",
  "iot_class": "local_push",
  "loggers": ["pykeyboard"],
  "quality_scale": "legacy",
  "requirements": ["pyuserinput==0.1.11"]
}

6  homeassistant/components/keyboard/services.yaml  Normal file
@@ -0,0 +1,6 @@
volume_up:
volume_down:
volume_mute:
media_play_pause:
media_next_track:
media_prev_track:

28  homeassistant/components/keyboard/strings.json  Normal file
@@ -0,0 +1,28 @@
{
  "services": {
    "media_next_track": {
      "description": "Simulates a key press of the \"Media Next Track\" button on Home Assistant's host machine.",
      "name": "Media next track"
    },
    "media_play_pause": {
      "description": "Simulates a key press of the \"Media Play/Pause\" button on Home Assistant's host machine.",
      "name": "Media play/pause"
    },
    "media_prev_track": {
      "description": "Simulates a key press of the \"Media Previous Track\" button on Home Assistant's host machine.",
      "name": "Media previous track"
    },
    "volume_down": {
      "description": "Simulates a key press of the \"Volume Down\" button on Home Assistant's host machine.",
      "name": "Volume down"
    },
    "volume_mute": {
      "description": "Simulates a key press of the \"Volume Mute\" button on Home Assistant's host machine.",
      "name": "Volume mute"
    },
    "volume_up": {
      "description": "Simulates a key press of the \"Volume Up\" button on Home Assistant's host machine.",
      "name": "Volume up"
    }
  }
}
@@ -85,7 +85,7 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN):
         if discovery := self._discovered_adv:
             self._discovered_advs[discovery.address] = discovery
         else:
-            current_addresses = self._async_current_ids(include_ignore=False)
+            current_addresses = self._async_current_ids()
             for discovery_info in async_discovered_service_info(self.hass):
                 self._ble_device = discovery_info.device
                 address = discovery_info.address
@@ -299,8 +299,8 @@ def _create_climate_ui(xknx: XKNX, conf: ConfigExtractor, name: str) -> XknxClimate:
         group_address_active_state=conf.get_state_and_passive(CONF_GA_ACTIVE),
         group_address_command_value_state=conf.get_state_and_passive(CONF_GA_VALVE),
         sync_state=sync_state,
-        min_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MIN_TEMP),
-        max_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MAX_TEMP),
+        min_temp=conf.get(ClimateConf.MIN_TEMP),
+        max_temp=conf.get(ClimateConf.MAX_TEMP),
         mode=climate_mode,
         group_address_fan_speed=conf.get_write(CONF_GA_FAN_SPEED),
         group_address_fan_speed_state=conf.get_state_and_passive(CONF_GA_FAN_SPEED),

@@ -486,7 +486,7 @@ class _KnxClimate(ClimateEntity, _KnxEntityBase):
             ha_controller_modes.append(self._last_hvac_mode)
         ha_controller_modes.append(HVACMode.OFF)

-        hvac_modes = sorted(set(filter(None, ha_controller_modes)))
+        hvac_modes = list(set(filter(None, ha_controller_modes)))
         return (
             hvac_modes
             if hvac_modes

@@ -13,7 +13,7 @@
   "requirements": [
     "xknx==3.10.0",
     "xknxproject==3.8.2",
-    "knx-frontend==2025.10.31.195356"
+    "knx-frontend==2025.10.26.81530"
   ],
   "single_config_entry": true
 }
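The `sorted(...)` versus `list(set(...))` difference above changes presentation only: both deduplicate the HVAC-mode list, but only `sorted` yields a stable, deterministic ordering, since set iteration order is arbitrary. A quick demonstration:

modes = ["heat", "off", "heat", "off"]
assert sorted(set(modes)) == ["heat", "off"]     # deterministic order
assert set(list(set(modes))) == {"heat", "off"}  # same members, arbitrary order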
@@ -106,7 +106,7 @@ class KulerskyConfigFlow(ConfigFlow, domain=DOMAIN):
         if discovery := self._discovery_info:
             self._discovered_devices[discovery.address] = discovery
         else:
-            current_addresses = self._async_current_ids(include_ignore=False)
+            current_addresses = self._async_current_ids()
             for discovery in async_discovered_service_info(self.hass):
                 if (
                     discovery.address in current_addresses

@@ -79,7 +79,7 @@ class Ld2410BleConfigFlow(ConfigFlow, domain=DOMAIN):
         if discovery := self._discovery_info:
             self._discovered_devices[discovery.address] = discovery
         else:
-            current_addresses = self._async_current_ids(include_ignore=False)
+            current_addresses = self._async_current_ids()
             for discovery in async_discovered_service_info(self.hass):
                 if (
                     discovery.address in current_addresses

@@ -35,7 +35,7 @@ class LeaoneConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:

@@ -85,7 +85,7 @@ class LedBleConfigFlow(ConfigFlow, domain=DOMAIN):
         if discovery := self._discovery_info:
             self._discovered_devices[discovery.address] = discovery
         else:
-            current_addresses = self._async_current_ids(include_ignore=False)
+            current_addresses = self._async_current_ids()
             for discovery in async_discovered_service_info(self.hass):
                 if (
                     discovery.address in current_addresses

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
   "iot_class": "local_polling",
   "quality_scale": "silver",
-  "requirements": ["librehardwaremonitor-api==1.5.0"]
+  "requirements": ["librehardwaremonitor-api==1.4.0"]
 }
@@ -2,8 +2,6 @@

 from __future__ import annotations

-from typing import Any
-
 from librehardwaremonitor_api.model import LibreHardwareMonitorSensorData

 from homeassistant.components.sensor import SensorEntity, SensorStateClass

@@ -53,10 +51,10 @@ class LibreHardwareMonitorSensor(
         super().__init__(coordinator)

         self._attr_name: str = sensor_data.name
-        self._attr_native_value: str | None = sensor_data.value
-        self._attr_extra_state_attributes: dict[str, Any] = {
-            STATE_MIN_VALUE: sensor_data.min,
-            STATE_MAX_VALUE: sensor_data.max,
+        self.value: str | None = sensor_data.value
+        self._attr_extra_state_attributes: dict[str, str] = {
+            STATE_MIN_VALUE: self._format_number_value(sensor_data.min),
+            STATE_MAX_VALUE: self._format_number_value(sensor_data.max),
         }
         self._attr_native_unit_of_measurement = sensor_data.unit
         self._attr_unique_id: str = f"{entry_id}_{sensor_data.sensor_id}"

@@ -74,12 +72,23 @@ class LibreHardwareMonitorSensor(
     def _handle_coordinator_update(self) -> None:
         """Handle updated data from the coordinator."""
         if sensor_data := self.coordinator.data.sensor_data.get(self._sensor_id):
-            self._attr_native_value = sensor_data.value
+            self.value = sensor_data.value
             self._attr_extra_state_attributes = {
-                STATE_MIN_VALUE: sensor_data.min,
-                STATE_MAX_VALUE: sensor_data.max,
+                STATE_MIN_VALUE: self._format_number_value(sensor_data.min),
+                STATE_MAX_VALUE: self._format_number_value(sensor_data.max),
             }
         else:
-            self._attr_native_value = None
+            self.value = None

         super()._handle_coordinator_update()

+    @property
+    def native_value(self) -> str | None:
+        """Return the formatted sensor value or None if no value is available."""
+        if self.value is not None and self.value != "-":
+            return self._format_number_value(self.value)
+        return None
+
+    @staticmethod
+    def _format_number_value(number_str: str) -> str:
+        return number_str.replace(",", ".")
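`_format_number_value` exists because the monitor can report numbers with a locale-dependent decimal comma; normalizing to a dot keeps the state string parseable as a number. A standalone copy of the same one-line normalization, for illustration:

def format_number_value(number_str: str) -> str:
    """Mirror of the sensor's comma-to-dot normalization."""
    return number_str.replace(",", ".")


assert format_number_value("42,5") == "42.5"
assert format_number_value("1.0") == "1.0"  # already-dotted values pass through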
94  homeassistant/components/lirc/__init__.py  Normal file
@@ -0,0 +1,94 @@
"""Support for LIRC devices."""

import logging
import threading
import time

import lirc

from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType

_LOGGER = logging.getLogger(__name__)

BUTTON_NAME = "button_name"

DOMAIN = "lirc"

EVENT_IR_COMMAND_RECEIVED = "ir_command_received"

ICON = "mdi:remote"

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the LIRC capability."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "LIRC",
        },
    )
    # blocking=True gives unexpected behavior (multiple responses for 1 press)
    # also by not blocking, we allow hass to shut down the thread gracefully
    # on exit.
    lirc.init("home-assistant", blocking=False)
    lirc_interface = LircInterface(hass)

    def _start_lirc(_event):
        lirc_interface.start()

    def _stop_lirc(_event):
        lirc_interface.stopped.set()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_lirc)
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_lirc)

    return True


class LircInterface(threading.Thread):
    """Interfaces with the lirc daemon to read IR commands.

    When using lirc in blocking mode, sometimes repeated commands get produced
    in the next read of a command so we use a thread here to just wait
    around until a non-empty response is obtained from lirc.
    """

    def __init__(self, hass):
        """Construct a LIRC interface object."""
        threading.Thread.__init__(self)
        self.daemon = True
        self.stopped = threading.Event()
        self.hass = hass

    def run(self):
        """Run the loop of the LIRC interface thread."""
        _LOGGER.debug("LIRC interface thread started")
        while not self.stopped.is_set():
            try:
                code = lirc.nextcode()  # list; empty if no buttons pressed
            except lirc.NextCodeError:
                _LOGGER.warning("Error reading next code from LIRC")
                code = None
            # interpret result from python-lirc
            if code:
                code = code[0]
                _LOGGER.debug("Got new LIRC code %s", code)
                self.hass.bus.fire(EVENT_IR_COMMAND_RECEIVED, {BUTTON_NAME: code})
            else:
                time.sleep(0.2)
        lirc.deinit()
        _LOGGER.debug("LIRC interface thread stopped")
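Every decoded button press is fired on the event bus as `ir_command_received` with a `button_name` payload, so consumers subscribe to the bus rather than polling the thread. A minimal listener, assuming the component above is running; `watch_remote` is an illustrative name, not part of the integration:

import logging

from homeassistant.core import Event, HomeAssistant, callback

_LOGGER = logging.getLogger(__name__)


def watch_remote(hass: HomeAssistant) -> None:
    """Log every IR button press fired by the LIRC thread (sketch only)."""

    @callback
    def _handle(event: Event) -> None:
        _LOGGER.info("IR button pressed: %s", event.data["button_name"])

    hass.bus.async_listen("ir_command_received", _handle)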
10  homeassistant/components/lirc/manifest.json  Normal file
@@ -0,0 +1,10 @@
{
  "domain": "lirc",
  "name": "LIRC",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/lirc",
  "iot_class": "local_push",
  "loggers": ["lirc"],
  "quality_scale": "legacy",
  "requirements": ["python-lirc==1.2.3"]
}
@@ -93,7 +93,7 @@ class InspectorBLEConfigFlow(ConfigFlow, domain=DOMAIN):
             self._discovery_info = self._discovered_devices[address]
             return await self.async_step_check_connection()

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(self.hass):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
@@ -49,15 +49,6 @@ ATA_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
         value_fn=lambda x: x.device.total_energy_consumed,
         enabled=lambda x: x.device.has_energy_consumed_meter,
     ),
-    MelcloudSensorEntityDescription(
-        key="outside_temperature",
-        translation_key="outside_temperature",
-        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
-        device_class=SensorDeviceClass.TEMPERATURE,
-        state_class=SensorStateClass.MEASUREMENT,
-        value_fn=lambda x: x.device.outdoor_temperature,
-        enabled=lambda x: x.device.has_outdoor_temperature,
-    ),
 )
 ATW_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
     MelcloudSensorEntityDescription(
@@ -75,7 +75,7 @@ class MelnorConfigFlow(ConfigFlow, domain=DOMAIN):

             return self._create_entry(address)

-        current_addresses = self._async_current_ids(include_ignore=False)
+        current_addresses = self._async_current_ids()
         for discovery_info in async_discovered_service_info(
             self.hass, connectable=True
         ):
@@ -9,7 +9,7 @@
   "iot_class": "cloud_push",
   "loggers": ["pymiele"],
   "quality_scale": "platinum",
-  "requirements": ["pymiele==0.6.0"],
+  "requirements": ["pymiele==0.5.6"],
   "single_config_entry": true,
   "zeroconf": ["_mieleathome._tcp.local."]
 }
@@ -19,6 +19,7 @@ from homeassistant.components.sensor import (
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
REVOLUTIONS_PER_MINUTE,
|
||||
STATE_UNKNOWN,
|
||||
EntityCategory,
|
||||
UnitOfEnergy,
|
||||
UnitOfTemperature,
|
||||
@@ -761,35 +762,40 @@ class MieleSensor(MieleEntity, SensorEntity):
|
||||
class MieleRestorableSensor(MieleSensor, RestoreSensor):
|
||||
"""Representation of a Sensor whose internal state can be restored."""
|
||||
|
||||
_attr_native_value: StateType
|
||||
_last_value: StateType
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: MieleDataUpdateCoordinator,
|
||||
device_id: str,
|
||||
description: MieleSensorDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, device_id, description)
|
||||
self._last_value = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
# recover last value from cache when adding entity
|
||||
last_data = await self.async_get_last_sensor_data()
|
||||
if last_data:
|
||||
self._attr_native_value = last_data.native_value # type: ignore[assignment]
|
||||
last_value = await self.async_get_last_state()
|
||||
if last_value and last_value.state != STATE_UNKNOWN:
|
||||
self._last_value = last_value.state
|
||||
|
||||
@property
|
||||
def native_value(self) -> StateType:
|
||||
"""Return the state of the sensor.
|
||||
"""Return the state of the sensor."""
|
||||
return self._last_value
|
||||
|
||||
It is necessary to override `native_value` to fall back to the default
|
||||
attribute-based implementation, instead of the function-based
|
||||
implementation in `MieleSensor`.
|
||||
"""
|
||||
return self._attr_native_value
|
||||
|
||||
def _update_native_value(self) -> None:
|
||||
"""Update the native value attribute of the sensor."""
|
||||
self._attr_native_value = self.entity_description.value_fn(self.device)
|
||||
def _update_last_value(self) -> None:
|
||||
"""Update the last value of the sensor."""
|
||||
self._last_value = self.entity_description.value_fn(self.device)
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._update_native_value()
|
||||
self._update_last_value()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
|
||||
@@ -906,7 +912,7 @@ class MieleProgramIdSensor(MieleSensor):
|
||||
class MieleTimeSensor(MieleRestorableSensor):
|
||||
"""Representation of time sensors keeping state from cache."""
|
||||
|
||||
def _update_native_value(self) -> None:
|
||||
def _update_last_value(self) -> None:
|
||||
"""Update the last value of the sensor."""
|
||||
|
||||
current_value = self.entity_description.value_fn(self.device)
|
||||
@@ -917,9 +923,7 @@ class MieleTimeSensor(MieleRestorableSensor):
|
||||
current_status == StateStatus.PROGRAM_ENDED
|
||||
and self.entity_description.end_value_fn is not None
|
||||
):
|
||||
self._attr_native_value = self.entity_description.end_value_fn(
|
||||
self._attr_native_value
|
||||
)
|
||||
self._last_value = self.entity_description.end_value_fn(self._last_value)
|
||||
|
||||
# keep value when program ends if no function is specified
|
||||
elif current_status == StateStatus.PROGRAM_ENDED:
|
||||
@@ -927,11 +931,11 @@ class MieleTimeSensor(MieleRestorableSensor):
|
||||
|
||||
# force unknown when appliance is not working (some devices are keeping last value until a new cycle starts)
|
||||
elif current_status in (StateStatus.OFF, StateStatus.ON, StateStatus.IDLE):
|
||||
self._attr_native_value = None
|
||||
self._last_value = None
|
||||
|
||||
# otherwise, cache value and return it
|
||||
else:
|
||||
self._attr_native_value = current_value
|
||||
self._last_value = current_value
|
||||
|
||||
|
||||
class MieleConsumptionSensor(MieleRestorableSensor):
|
||||
@@ -939,13 +943,13 @@ class MieleConsumptionSensor(MieleRestorableSensor):
|
||||
|
||||
_is_reporting: bool = False
|
||||
|
||||
def _update_native_value(self) -> None:
|
||||
def _update_last_value(self) -> None:
|
||||
"""Update the last value of the sensor."""
|
||||
current_value = self.entity_description.value_fn(self.device)
|
||||
current_status = StateStatus(self.device.state_status)
|
||||
last_value = (
|
||||
float(cast(str, self._attr_native_value))
|
||||
if self._attr_native_value is not None
|
||||
float(cast(str, self._last_value))
|
||||
if self._last_value is not None and self._last_value != STATE_UNKNOWN
|
||||
else 0
|
||||
)
|
||||
|
||||
@@ -959,7 +963,7 @@ class MieleConsumptionSensor(MieleRestorableSensor):
            StateStatus.SERVICE,
        ):
            self._is_reporting = False
            self._attr_native_value = None
            self._last_value = None

        # the appliance might report the previous cycle's consumption as the last value and will report 0
        # only after a while, so it is necessary to force 0 until we see the 0 value coming from the API, unless
@@ -969,7 +973,7 @@ class MieleConsumptionSensor(MieleRestorableSensor):
            and not self._is_reporting
            and last_value > 0
        ):
            self._attr_native_value = current_value
            self._last_value = current_value
            self._is_reporting = True

        elif (
@@ -978,12 +982,12 @@ class MieleConsumptionSensor(MieleRestorableSensor):
            and current_value is not None
            and cast(int, current_value) > 0
        ):
            self._attr_native_value = 0
            self._last_value = 0

        # keep value when program ends
        elif current_status == StateStatus.PROGRAM_ENDED:
            pass

        else:
            self._attr_native_value = current_value
            self._last_value = current_value
            self._is_reporting = True
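The branches above implement a masking rule: right after a program starts, the API can still return the previous cycle's consumption total, so the sensor reports 0 until a genuine 0 (or fresh data) arrives. Since parts of the conditions fall outside these hunks, the following is only a plausible reconstruction of the rule, with plain-string statuses standing in for `StateStatus` members:

    def masked_consumption(
        status: str,
        current: float | None,
        last: float,
        is_reporting: bool,
    ) -> tuple[float | None, bool]:
        """Return (value, is_reporting) under the zero-forcing rule (sketch)."""
        if status in ("off", "on", "idle", "service"):
            return None, False  # appliance not working: force unknown
        if status == "in_use" and not is_reporting and last > 0:
            if current is not None and current > 0:
                return 0, False  # stale total from the previous cycle: mask it
            return current, True  # a real 0 arrived: normal reporting resumes
        if status == "program_ended":
            return last, is_reporting  # keep the final total on screen
        return current, True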
@@ -72,7 +72,7 @@ class MoatConfigFlow(ConfigFlow, domain=DOMAIN):
                    title=self._discovered_devices[address], data={}
                )

        current_addresses = self._async_current_ids(include_ignore=False)
        current_addresses = self._async_current_ids()
        for discovery_info in async_discovered_service_info(self.hass, False):
            address = discovery_info.address
            if address in current_addresses or address in self._discovered_devices:
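A note on the one-line Moat change visible here: `_async_current_ids()` collects the unique IDs of existing config entries, and its `include_ignore` parameter controls whether entries the user has ignored count as well (to my understanding the helper includes them by default). Whether an ignored device reappears in the discovery list therefore hinges entirely on that single argument.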
@@ -19,11 +19,7 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
from .coordinator import NASwebCoordinator
from .nasweb_data import NASwebData

PLATFORMS: list[Platform] = [
    Platform.ALARM_CONTROL_PANEL,
    Platform.SENSOR,
    Platform.SWITCH,
]
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]

NASWEB_CONFIG_URL = "https://{host}/page"
@@ -1,154 +0,0 @@
"""Platform for NASweb alarms."""

from __future__ import annotations

import logging
import time

from webio_api import Zone as NASwebZone
from webio_api.const import STATE_ZONE_ALARM, STATE_ZONE_ARMED, STATE_ZONE_DISARMED

from homeassistant.components.alarm_control_panel import (
    DOMAIN as DOMAIN_ALARM_CONTROL_PANEL,
    AlarmControlPanelEntity,
    AlarmControlPanelEntityFeature,
    AlarmControlPanelState,
    CodeFormat,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.helpers.update_coordinator import (
    BaseCoordinatorEntity,
    BaseDataUpdateCoordinatorProtocol,
)

from . import NASwebConfigEntry
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL

_LOGGER = logging.getLogger(__name__)
ALARM_CONTROL_PANEL_TRANSLATION_KEY = "zone"

NASWEB_STATE_TO_HA_STATE = {
    STATE_ZONE_ALARM: AlarmControlPanelState.TRIGGERED,
    STATE_ZONE_ARMED: AlarmControlPanelState.ARMED_AWAY,
    STATE_ZONE_DISARMED: AlarmControlPanelState.DISARMED,
}


async def async_setup_entry(
    hass: HomeAssistant,
    config: NASwebConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the alarm control panel platform."""
    coordinator = config.runtime_data
    current_zones: set[int] = set()

    @callback
    def _check_entities() -> None:
        received_zones: dict[int, NASwebZone] = {
            entry.index: entry for entry in coordinator.webio_api.zones
        }
        added = {i for i in received_zones if i not in current_zones}
        removed = {i for i in current_zones if i not in received_zones}
        entities_to_add: list[ZoneEntity] = []
        for index in added:
            webio_zone = received_zones[index]
            if not isinstance(webio_zone, NASwebZone):
                _LOGGER.error("Cannot create ZoneEntity without NASwebZone")
                continue
            new_zone = ZoneEntity(coordinator, webio_zone)
            entities_to_add.append(new_zone)
            current_zones.add(index)
        async_add_entities(entities_to_add)
        entity_registry = er.async_get(hass)
        for index in removed:
            unique_id = f"{DOMAIN}.{config.unique_id}.zone.{index}"
            if entity_id := entity_registry.async_get_entity_id(
                DOMAIN_ALARM_CONTROL_PANEL, DOMAIN, unique_id
            ):
                entity_registry.async_remove(entity_id)
                current_zones.remove(index)
            else:
                _LOGGER.warning("Failed to remove old zone: no entity_id")

    coordinator.async_add_listener(_check_entities)
    _check_entities()


class ZoneEntity(AlarmControlPanelEntity, BaseCoordinatorEntity):
    """Entity representing a NASweb zone."""

    _attr_has_entity_name = True
    _attr_should_poll = False
    _attr_translation_key = ALARM_CONTROL_PANEL_TRANSLATION_KEY

    def __init__(
        self, coordinator: BaseDataUpdateCoordinatorProtocol, nasweb_zone: NASwebZone
    ) -> None:
        """Initialize the zone entity."""
        super().__init__(coordinator)
        self._zone = nasweb_zone
        self._attr_name = nasweb_zone.name
        self._attr_translation_placeholders = {"index": f"{nasweb_zone.index:2d}"}
        self._attr_unique_id = (
            f"{DOMAIN}.{self._zone.webio_serial}.zone.{self._zone.index}"
        )
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._zone.webio_serial)},
        )

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        self._handle_coordinator_update()

    def _set_attr_available(
        self, entity_last_update: float, available: bool | None
    ) -> None:
        if (
            self.coordinator.last_update is None
            or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
        ):
            self._attr_available = False
        else:
            self._attr_available = available if available is not None else False

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._attr_alarm_state = NASWEB_STATE_TO_HA_STATE[self._zone.state]
        if self._zone.pass_type == 0:
            self._attr_code_format = CodeFormat.TEXT
        elif self._zone.pass_type == 1:
            self._attr_code_format = CodeFormat.NUMBER
        else:
            self._attr_code_format = None
        self._attr_code_arm_required = self._attr_code_format is not None

        self._set_attr_available(self._zone.last_update, self._zone.available)
        self.async_write_ha_state()

    async def async_update(self) -> None:
        """Update the entity.

        Only used by the generic entity update service. Scheduling updates is
        not necessary; the coordinator takes care of updates via push
        notifications.
        """

    @property
    def supported_features(self) -> AlarmControlPanelEntityFeature:
        """Return the list of supported features."""
        return AlarmControlPanelEntityFeature.ARM_AWAY

    async def async_alarm_arm_away(self, code: str | None = None) -> None:
        """Arm the zone in away mode."""
        await self._zone.arm(code)

    async def async_alarm_disarm(self, code: str | None = None) -> None:
        """Disarm the zone."""
        await self._zone.disarm(code)
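One detail worth calling out in the removed platform is `_set_attr_available`: an entity is marked unavailable when the coordinator has never updated or when the zone's own timestamp is older than `STATUS_UPDATE_MAX_TIME_INTERVAL`. A standalone sketch of that staleness rule follows; the interval value here is an assumption for illustration, since the real constant lives in the integration's `const.py`:

    import time

    STATUS_UPDATE_MAX_TIME_INTERVAL = 8 * 60  # assumed value, in seconds


    def is_available(last_update: float | None, reported: bool | None) -> bool:
        """Treat stale or missing status updates as unavailable (sketch)."""
        if (
            last_update is None
            or time.time() - last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
        ):
            return False
        return reported if reported is not None else False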
@@ -23,7 +23,6 @@ _LOGGER = logging.getLogger(__name__)

KEY_INPUTS = "inputs"
KEY_OUTPUTS = "outputs"
KEY_ZONES = "zones"


class NotificationCoordinator:
@@ -104,7 +103,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
            KEY_OUTPUTS: self.webio_api.outputs,
            KEY_INPUTS: self.webio_api.inputs,
            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
            KEY_ZONES: self.webio_api.zones,
        }
        self.async_set_updated_data(data)

@@ -199,6 +197,5 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
            KEY_OUTPUTS: self.webio_api.outputs,
            KEY_INPUTS: self.webio_api.inputs,
            KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
            KEY_ZONES: self.webio_api.zones,
        }
        self.async_set_updated_data(new_data)