Mirror of https://github.com/home-assistant/core.git (synced 2025-11-26 02:58:10 +00:00)

Compare commits: 2025.11.3 ... cursor/add (48 commits)
Commits (SHA1):
b2fe77b7f5
d984e4398e
75bd1a0310
5299690cb7
98c1dca7a8
54c022d58a
77d40ddc7d
092841ca5e
70238a613d
5b8d373527
4e3664b26f
76f5cc368b
2f4cd21a14
d369aa761a
d795806e3d
d45a80ed06
09b46d22af
b157afac13
edaf5c8167
1d6c9e3d94
ddbc96206f
cee5f4e275
03a1ffc59b
6e921a0192
99eb48c27f
06dbfe52d0
b516de119c
dcb2087f4b
7de94f3632
909e2304c1
ae0b854314
6a6054afee
3377e90b81
342c7f6510
982fba167a
8026e64d7c
ebbfd5a6c7
356077541c
0b9a22b089
cce6f60b70
d57dc5d0cd
6088f5eef5
5c96b11479
afda849f3e
f2f769b34a
45558f3087
c10b643af9
569dd2d6b7
.github/workflows/builder.yml (vendored, 46 lines changed)
@@ -162,18 +162,6 @@ jobs:
             sed -i "s|home-assistant-intents==.*||" requirements_all.txt
           fi
 
-      - name: Adjustments for armhf
-        if: matrix.arch == 'armhf'
-        run: |
-          # Pandas has issues building on armhf, it is expected they
-          # will drop the platform in the near future (they consider it
-          # "flimsy" on 386). The following packages depend on pandas,
-          # so we comment them out.
-          sed -i "s|env-canada|# env-canada|g" requirements_all.txt
-          sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
-          sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
-          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
-
       - name: Download translations
         uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
         with:
@@ -226,19 +214,11 @@ jobs:
           - odroid-c4
           - odroid-m1
           - odroid-n2
-          - odroid-xu
-          - qemuarm
           - qemuarm-64
-          - qemux86
           - qemux86-64
-          - raspberrypi
-          - raspberrypi2
-          - raspberrypi3
           - raspberrypi3-64
-          - raspberrypi4
           - raspberrypi4-64
           - raspberrypi5-64
-          - tinker
           - yellow
           - green
     steps:
@@ -297,6 +277,7 @@ jobs:
           key-description: "Home Assistant Core"
           version: ${{ needs.init.outputs.version }}
           channel: ${{ needs.init.outputs.channel }}
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
 
       - name: Update version file (stable -> beta)
         if: needs.init.outputs.channel == 'stable'
@@ -306,6 +287,7 @@ jobs:
           key-description: "Home Assistant Core"
           version: ${{ needs.init.outputs.version }}
          channel: beta
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'
 
   publish_container:
     name: Publish meta container for ${{ matrix.registry }}
@@ -357,27 +339,12 @@ jobs:
 
           docker manifest create "${registry}/home-assistant:${tag_l}" \
             "${registry}/amd64-homeassistant:${tag_r}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
             "${registry}/aarch64-homeassistant:${tag_r}"
 
           docker manifest annotate "${registry}/home-assistant:${tag_l}" \
             "${registry}/amd64-homeassistant:${tag_r}" \
             --os linux --arch amd64
 
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            --os linux --arch 386
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v6
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v7
-
           docker manifest annotate "${registry}/home-assistant:${tag_l}" \
             "${registry}/aarch64-homeassistant:${tag_r}" \
             --os linux --arch arm64 --variant=v8
@@ -405,23 +372,14 @@ jobs:
 
           # Pull images from github container registry and verify signature
           docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
           docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
 
           validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
           validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
 
           if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
             # Upload images to dockerhub
             push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
             push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
           fi
.github/workflows/ci.yaml (vendored, 8 lines changed)
@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 1
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.12"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
   # 10.3 is the oldest supported version
@@ -502,7 +502,6 @@ jobs:
             libavfilter-dev \
             libavformat-dev \
             libavutil-dev \
-            libgammu-dev \
             libswresample-dev \
             libswscale-dev \
             libudev-dev
@@ -801,8 +800,7 @@ jobs:
             -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
-            libturbojpeg \
-            libgammu-dev
+            libturbojpeg
       - *checkout
       - *setup-python-default
       - *cache-restore-python-default
@@ -853,7 +851,6 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev \
             libxml2-utils
       - *checkout
       - *setup-python-matrix
@@ -1233,7 +1230,6 @@ jobs:
             bluez \
             ffmpeg \
             libturbojpeg \
-            libgammu-dev \
             libxml2-utils
       - *checkout
       - *setup-python-matrix
.github/workflows/wheels.yml (vendored, 2 lines changed)
@@ -228,7 +228,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
@@ -107,6 +107,7 @@ homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
 homeassistant.components.azure_storage.*
+homeassistant.components.backblaze_b2.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*
@@ -361,7 +362,6 @@ homeassistant.components.myuplink.*
 homeassistant.components.nam.*
 homeassistant.components.nanoleaf.*
 homeassistant.components.nasweb.*
-homeassistant.components.neato.*
 homeassistant.components.nest.*
 homeassistant.components.netatmo.*
 homeassistant.components.network.*
@@ -395,7 +395,6 @@ homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
 homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
-homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.paperless_ngx.*
 homeassistant.components.peblar.*
CODEOWNERS (generated, 16 lines changed)
@@ -196,6 +196,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_service_bus/ @hfurubotten
 /homeassistant/components/azure_storage/ @zweckj
 /tests/components/azure_storage/ @zweckj
+/homeassistant/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
+/tests/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy
@@ -316,8 +318,6 @@ build.json @home-assistant/supervisor
 /tests/components/cpuspeed/ @fabaff
 /homeassistant/components/crownstone/ @Crownstone @RicArch97
 /tests/components/crownstone/ @Crownstone @RicArch97
-/homeassistant/components/cups/ @fabaff
-/tests/components/cups/ @fabaff
 /homeassistant/components/cync/ @Kinachi249
 /tests/components/cync/ @Kinachi249
 /homeassistant/components/daikin/ @fredrike
@@ -510,8 +510,6 @@ build.json @home-assistant/supervisor
 /tests/components/fjaraskupan/ @elupus
 /homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski
 /tests/components/flexit_bacnet/ @lellky @piotrbulinski
-/homeassistant/components/flick_electric/ @ZephireNZ
-/tests/components/flick_electric/ @ZephireNZ
 /homeassistant/components/flipr/ @cnico
 /tests/components/flipr/ @cnico
 /homeassistant/components/flo/ @dmulcahey
@@ -1479,8 +1477,6 @@ build.json @home-assistant/supervisor
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/smlight/ @tl-sl
 /tests/components/smlight/ @tl-sl
-/homeassistant/components/sms/ @ocalvo
-/tests/components/sms/ @ocalvo
 /homeassistant/components/snapcast/ @luar123
 /tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni
@@ -1543,8 +1539,8 @@ build.json @home-assistant/supervisor
 /tests/components/suez_water/ @ooii @jb101010-2
 /homeassistant/components/sun/ @home-assistant/core
 /tests/components/sun/ @home-assistant/core
-/homeassistant/components/sunricher_dali/ @niracler
-/tests/components/sunricher_dali/ @niracler
+/homeassistant/components/sunricher_dali_center/ @niracler
+/tests/components/sunricher_dali_center/ @niracler
 /homeassistant/components/supla/ @mwegrzynek
 /homeassistant/components/surepetcare/ @benleb @danielhiversen
 /tests/components/surepetcare/ @benleb @danielhiversen
@@ -1721,8 +1717,8 @@ build.json @home-assistant/supervisor
 /tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
 /homeassistant/components/valve/ @home-assistant/core
 /tests/components/valve/ @home-assistant/core
-/homeassistant/components/vegehub/ @ghowevege
-/tests/components/vegehub/ @ghowevege
+/homeassistant/components/vegehub/ @thulrus
+/tests/components/vegehub/ @thulrus
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
 /homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
Dockerfile (generated, 2 lines changed)
@@ -25,7 +25,7 @@ RUN \
         "armv7") go2rtc_suffix='arm' ;; \
         *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
     && chmod +x /bin/go2rtc \
     # Verify go2rtc can be executed
     && go2rtc --version
@@ -13,7 +13,6 @@ RUN \
     libavcodec-dev \
     libavdevice-dev \
     libavutil-dev \
-    libgammu-dev \
     libswscale-dev \
     libswresample-dev \
     libavfilter-dev \
@@ -1,10 +1,7 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.11.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.11.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.11.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
 cosign:
   base_identity: https://github.com/home-assistant/docker/.*
   identity: https://github.com/home-assistant/core/.*
@@ -6,6 +6,7 @@ Sending HOTP through notify service
 from __future__ import annotations
 
 import asyncio
+from collections import OrderedDict
 import logging
 from typing import Any, cast
 
@@ -303,14 +304,13 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
         if not self._available_notify_services:
             return self.async_abort(reason="no_available_service")
 
-        schema = vol.Schema(
-            {
-                vol.Required("notify_service"): vol.In(self._available_notify_services),
-                vol.Optional("target"): str,
-            }
-        )
+        schema: dict[str, Any] = OrderedDict()
+        schema["notify_service"] = vol.In(self._available_notify_services)
+        schema["target"] = vol.Optional(str)
 
-        return self.async_show_form(step_id="init", data_schema=schema, errors=errors)
+        return self.async_show_form(
+            step_id="init", data_schema=vol.Schema(schema), errors=errors
+        )
 
     async def async_step_setup(
         self, user_input: dict[str, str] | None = None
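For reference, a minimal standalone sketch of the inline-dict schema style on the left side of this hunk; the service name here is a placeholder (the flow above fills it from self._available_notify_services):

import voluptuous as vol

# Placeholder service list for illustration only.
schema = vol.Schema(
    {
        vol.Required("notify_service"): vol.In(["notify.mobile_app"]),
        vol.Optional("target"): str,
    }
)

# A valid payload passes through unchanged; a missing required key
# raises vol.MultipleInvalid.
print(schema({"notify_service": "notify.mobile_app", "target": "+15551234"}))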
@@ -179,18 +179,12 @@ class Data:
         user_hash = base64.b64decode(found["password"])
 
         # bcrypt.checkpw is timing-safe
-        # With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
-        # Previously the password was silently truncated.
-        # https://github.com/pyca/bcrypt/pull/1000
-        if not bcrypt.checkpw(password.encode()[:72], user_hash):
+        if not bcrypt.checkpw(password.encode(), user_hash):
             raise InvalidAuth
 
     def hash_password(self, password: str, for_storage: bool = False) -> bytes:
         """Encode a password."""
-        # With bcrypt 5.0 passing a password longer than 72 bytes raises a ValueError.
-        # Previously the password was silently truncated.
-        # https://github.com/pyca/bcrypt/pull/1000
-        hashed: bytes = bcrypt.hashpw(password.encode()[:72], bcrypt.gensalt(rounds=12))
+        hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
 
         if for_storage:
             hashed = base64.b64encode(hashed)
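The removed comments refer to bcrypt's 72-byte input limit: versions before 5.0 silently truncated longer passwords, while 5.0 raises ValueError (https://github.com/pyca/bcrypt/pull/1000). A minimal sketch of the explicit-truncation behavior those comments describe:

import bcrypt

# bcrypt only considers the first 72 bytes of the password; slicing
# keeps longer inputs working on bcrypt 5.0 instead of raising.
password = ("x" * 100).encode()

hashed = bcrypt.hashpw(password[:72], bcrypt.gensalt(rounds=12))
assert bcrypt.checkpw(password[:72], hashed)  # timing-safe comparison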
@@ -23,7 +23,7 @@ from homeassistant.components.bluetooth import (
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_ADDRESS
 
-from .const import DEVICE_MODEL, DOMAIN, MFCT_ID
+from .const import DOMAIN, MFCT_ID
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -128,15 +128,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Confirm discovery."""
-        assert self._discovered_device is not None
-
         if user_input is not None:
-            if self._discovered_device.device.firmware.need_firmware_upgrade:
+            if (
+                self._discovered_device is not None
+                and self._discovered_device.device.firmware.need_firmware_upgrade
+            ):
                 return self.async_abort(reason="firmware_upgrade_required")
 
             return self.async_create_entry(
-                title=self.context["title_placeholders"]["name"],
-                data={DEVICE_MODEL: self._discovered_device.device.model.value},
+                title=self.context["title_placeholders"]["name"], data={}
             )
 
         self._set_confirm_only()
@@ -164,10 +164,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
 
             self._discovered_device = discovery
 
-            return self.async_create_entry(
-                title=discovery.name,
-                data={DEVICE_MODEL: discovery.device.model.value},
-            )
+            return self.async_create_entry(title=discovery.name, data={})
 
         current_addresses = self._async_current_ids(include_ignore=False)
         devices: list[BluetoothServiceInfoBleak] = []
@@ -1,16 +1,11 @@
 """Constants for Airthings BLE."""
 
-from airthings_ble import AirthingsDeviceType
-
 DOMAIN = "airthings_ble"
 MFCT_ID = 820
 
 VOLUME_BECQUEREL = "Bq/m³"
 VOLUME_PICOCURIE = "pCi/L"
 
-DEVICE_MODEL = "device_model"
-
 DEFAULT_SCAN_INTERVAL = 300
-DEVICE_SPECIFIC_SCAN_INTERVAL = {AirthingsDeviceType.CORENTIUM_HOME_2.value: 1800}
 
 MAX_RETRIES_AFTER_STARTUP = 5
@@ -16,12 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util.unit_system import METRIC_SYSTEM
 
-from .const import (
-    DEFAULT_SCAN_INTERVAL,
-    DEVICE_MODEL,
-    DEVICE_SPECIFIC_SCAN_INTERVAL,
-    DOMAIN,
-)
+from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -39,18 +34,12 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
         self.airthings = AirthingsBluetoothDeviceData(
             _LOGGER, hass.config.units is METRIC_SYSTEM
         )
 
-        device_model = entry.data.get(DEVICE_MODEL)
-        interval = DEVICE_SPECIFIC_SCAN_INTERVAL.get(
-            device_model, DEFAULT_SCAN_INTERVAL
-        )
-
         super().__init__(
             hass,
             _LOGGER,
             config_entry=entry,
             name=DOMAIN,
-            update_interval=timedelta(seconds=interval),
+            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
         )
 
     async def _async_setup(self) -> None:
@@ -69,29 +58,11 @@ class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
         )
         self.ble_device = ble_device
 
-        if DEVICE_MODEL not in self.config_entry.data:
-            _LOGGER.debug("Fetching device info for migration")
-            try:
-                data = await self.airthings.update_device(self.ble_device)
-            except Exception as err:
-                raise UpdateFailed(
-                    f"Unable to fetch data for migration: {err}"
-                ) from err
-
-            self.hass.config_entries.async_update_entry(
-                self.config_entry,
-                data={**self.config_entry.data, DEVICE_MODEL: data.model.value},
-            )
-            self.update_interval = timedelta(
-                seconds=DEVICE_SPECIFIC_SCAN_INTERVAL.get(
-                    data.model.value, DEFAULT_SCAN_INTERVAL
-                )
-            )
 
     async def _async_update_data(self) -> AirthingsDevice:
         """Get data from Airthings BLE."""
         try:
             data = await self.airthings.update_device(self.ble_device)
         except Exception as err:
             raise UpdateFailed(f"Unable to fetch data: {err}") from err
 
         return data
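The removed migration logic selected a longer poll interval for specific models. A minimal standalone sketch of that lookup pattern; the constants mirror the removed .const entries but are re-declared here purely for illustration:

from datetime import timedelta

# Re-declared for illustration; in the removed code these lived in
# .const and were keyed by AirthingsDeviceType values.
DEFAULT_SCAN_INTERVAL = 300
DEVICE_SPECIFIC_SCAN_INTERVAL = {"CORENTIUM_HOME_2": 1800}


def scan_interval_for(device_model: str | None) -> timedelta:
    """Resolve the polling interval for a device model, falling back to the default."""
    seconds = DEVICE_SPECIFIC_SCAN_INTERVAL.get(device_model, DEFAULT_SCAN_INTERVAL)
    return timedelta(seconds=seconds)


assert scan_interval_for("CORENTIUM_HOME_2") == timedelta(minutes=30)
assert scan_interval_for(None) == timedelta(minutes=5)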
@@ -6,8 +6,8 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import Final
 
-from aioamazondevices.const.metadata import SENSOR_STATE_OFF
-from aioamazondevices.structures import AmazonDevice
+from aioamazondevices.api import AmazonDevice
+from aioamazondevices.const import SENSOR_STATE_OFF
 
 from homeassistant.components.binary_sensor import (
     DOMAIN as BINARY_SENSOR_DOMAIN,
@@ -2,13 +2,12 @@
 
 from datetime import timedelta
 
-from aioamazondevices.api import AmazonEchoApi
+from aioamazondevices.api import AmazonDevice, AmazonEchoApi
 from aioamazondevices.exceptions import (
     CannotAuthenticate,
     CannotConnect,
     CannotRetrieveData,
 )
-from aioamazondevices.structures import AmazonDevice
 from aiohttp import ClientSession
 
 from homeassistant.config_entries import ConfigEntry
@@ -16,7 +15,6 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import device_registry as dr
-from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
 from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
@@ -44,9 +42,6 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
             name=entry.title,
             config_entry=entry,
             update_interval=timedelta(seconds=SCAN_INTERVAL),
-            request_refresh_debouncer=Debouncer(
-                hass, _LOGGER, cooldown=30, immediate=False
-            ),
         )
         self.api = AmazonEchoApi(
             session,
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from typing import Any
 
-from aioamazondevices.structures import AmazonDevice
+from aioamazondevices.api import AmazonDevice
 
 from homeassistant.components.diagnostics import async_redact_data
 from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME
@@ -1,7 +1,7 @@
 """Defines a base Alexa Devices entity."""
 
-from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
-from aioamazondevices.structures import AmazonDevice
+from aioamazondevices.api import AmazonDevice
+from aioamazondevices.const import SPEAKER_GROUP_MODEL
 
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity import EntityDescription
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==8.0.1"]
+  "requirements": ["aioamazondevices==6.5.5"]
 }
@@ -6,9 +6,8 @@ from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from typing import Any, Final
 
-from aioamazondevices.api import AmazonEchoApi
-from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
-from aioamazondevices.structures import AmazonDevice
+from aioamazondevices.api import AmazonDevice, AmazonEchoApi
+from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 
 from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription
 from homeassistant.core import HomeAssistant
@@ -7,12 +7,12 @@ from dataclasses import dataclass
 from datetime import datetime
 from typing import Final
 
-from aioamazondevices.const.schedules import (
+from aioamazondevices.api import AmazonDevice
+from aioamazondevices.const import (
     NOTIFICATION_ALARM,
     NOTIFICATION_REMINDER,
     NOTIFICATION_TIMER,
 )
-from aioamazondevices.structures import AmazonDevice
 
 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -1,6 +1,6 @@
 """Support for services."""
 
-from aioamazondevices.const.sounds import SOUNDS_LIST
+from aioamazondevices.sounds import SOUNDS_LIST
 import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntryState
@@ -6,7 +6,7 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, Final
 
-from aioamazondevices.structures import AmazonDevice
+from aioamazondevices.api import AmazonDevice
 
 from homeassistant.components.switch import (
     DOMAIN as SWITCH_DOMAIN,
@@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate
 
-from aioamazondevices.const.devices import SPEAKER_GROUP_FAMILY
+from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
 
 from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
         translation_key="daily_rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
     ),
     SensorEntityDescription(
@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
         key=TYPE_LIGHTNING_PER_DAY,
         translation_key="lightning_strikes_per_day",
         native_unit_of_measurement="strikes",
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         entity_registry_enabled_default=False,
     ),
     SensorEntityDescription(
@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
         translation_key="monthly_rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
         entity_registry_enabled_default=False,
     ),
@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
         translation_key="weekly_rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
         entity_registry_enabled_default=False,
     ),
@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
         translation_key="yearly_rain",
         native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
         device_class=SensorDeviceClass.PRECIPITATION,
-        state_class=SensorStateClass.TOTAL_INCREASING,
+        state_class=SensorStateClass.TOTAL,
         suggested_display_precision=2,
         entity_registry_enabled_default=False,
     ),
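These hunks switch cumulative rain and lightning sensors between SensorStateClass.TOTAL_INCREASING, where any drop in value is treated as a meter reset, and SensorStateClass.TOTAL, where values may legitimately decrease. A minimal sketch of such a description with a hypothetical key (the real descriptions above use integration-specific TYPE_* constants):

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfPrecipitationDepth

# "daily_rain_total" is a placeholder key for illustration only.
DAILY_RAIN = SensorEntityDescription(
    key="daily_rain_total",
    translation_key="daily_rain",
    native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
    device_class=SensorDeviceClass.PRECIPITATION,
    state_class=SensorStateClass.TOTAL,  # tolerates the counter dropping back to 0
    suggested_display_precision=2,
)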
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/awair",
   "iot_class": "local_polling",
   "loggers": ["python_awair"],
-  "requirements": ["python-awair==0.2.5"],
+  "requirements": ["python-awair==0.2.4"],
   "zeroconf": [
     {
       "name": "awair*",
homeassistant/components/backblaze_b2/__init__.py (new file, 116 lines)
@@ -0,0 +1,116 @@
+"""The Backblaze B2 integration."""
+
+from __future__ import annotations
+
+from datetime import timedelta
+import logging
+from typing import Any
+
+from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
+from homeassistant.helpers.event import async_track_time_interval
+
+from .const import (
+    BACKBLAZE_REALM,
+    CONF_APPLICATION_KEY,
+    CONF_BUCKET,
+    CONF_KEY_ID,
+    DATA_BACKUP_AGENT_LISTENERS,
+    DOMAIN,
+)
+from .repairs import (
+    async_check_for_repair_issues,
+    create_bucket_access_restricted_issue,
+    create_bucket_not_found_issue,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+type BackblazeConfigEntry = ConfigEntry[Bucket]
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
+    """Set up Backblaze B2 from a config entry."""
+
+    info = InMemoryAccountInfo()
+    b2_api = B2Api(info)
+
+    def _authorize_and_get_bucket_sync() -> Bucket:
+        """Synchronously authorize the Backblaze B2 account and retrieve the bucket.
+
+        This function runs in the event loop's executor as b2sdk operations are blocking.
+        """
+        b2_api.authorize_account(
+            BACKBLAZE_REALM,
+            entry.data[CONF_KEY_ID],
+            entry.data[CONF_APPLICATION_KEY],
+        )
+        return b2_api.get_bucket_by_name(entry.data[CONF_BUCKET])
+
+    try:
+        bucket = await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
+    except exception.Unauthorized as err:
+        raise ConfigEntryAuthFailed(
+            translation_domain=DOMAIN,
+            translation_key="invalid_credentials",
+        ) from err
+    except exception.RestrictedBucket as err:
+        create_bucket_access_restricted_issue(hass, entry, err.bucket_name)
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="restricted_bucket",
+            translation_placeholders={
+                "restricted_bucket_name": err.bucket_name,
+            },
+        ) from err
+    except exception.NonExistentBucket as err:
+        create_bucket_not_found_issue(hass, entry, entry.data[CONF_BUCKET])
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="invalid_bucket_name",
+        ) from err
+    except exception.ConnectionReset as err:
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="cannot_connect",
+        ) from err
+    except exception.MissingAccountData as err:
+        raise ConfigEntryAuthFailed(
+            translation_domain=DOMAIN,
+            translation_key="invalid_auth",
+        ) from err
+
+    entry.runtime_data = bucket
+
+    def _async_notify_backup_listeners() -> None:
+        """Notify any registered backup agent listeners."""
+        _LOGGER.debug("Notifying backup listeners for entry %s", entry.entry_id)
+        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
+            listener()
+
+    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))
+
+    async def _periodic_issue_check(_now: Any) -> None:
+        """Periodically check for repair issues."""
+        await async_check_for_repair_issues(hass, entry)
+
+    entry.async_on_unload(
+        async_track_time_interval(hass, _periodic_issue_check, timedelta(minutes=30))
+    )
+
+    hass.async_create_task(async_check_for_repair_issues(hass, entry))
+
+    return True
+
+
+async def async_unload_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
+    """Unload a Backblaze B2 config entry.
+
+    Any resources directly managed by this entry that need explicit shutdown
+    would be handled here. In this case, the `async_on_state_change` listener
+    handles the notification logic on unload.
+    """
+    return True
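As a standalone illustration of the authorization flow this setup wraps in an executor job; the realm and credentials here are placeholders (the integration reads them from the config entry, and BACKBLAZE_REALM is assumed to resolve to the usual "production" realm):

from b2sdk.v2 import B2Api, InMemoryAccountInfo

# Placeholder credentials and bucket name for illustration only.
info = InMemoryAccountInfo()
api = B2Api(info)
api.authorize_account("production", "<key_id>", "<application_key>")
bucket = api.get_bucket_by_name("<bucket_name>")
print(bucket.name)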
homeassistant/components/backblaze_b2/backup.py (new file, 615 lines)
@@ -0,0 +1,615 @@
|
|||||||
|
"""Backup platform for the Backblaze B2 integration."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from collections.abc import AsyncIterator, Callable, Coroutine
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import mimetypes
|
||||||
|
from time import time
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from b2sdk.v2 import FileVersion
|
||||||
|
from b2sdk.v2.exception import B2Error
|
||||||
|
|
||||||
|
from homeassistant.components.backup import (
|
||||||
|
AgentBackup,
|
||||||
|
BackupAgent,
|
||||||
|
BackupAgentError,
|
||||||
|
BackupNotFound,
|
||||||
|
suggested_filename,
|
||||||
|
)
|
||||||
|
from homeassistant.core import HomeAssistant, callback
|
||||||
|
from homeassistant.util.async_iterator import AsyncIteratorReader
|
||||||
|
|
||||||
|
from . import BackblazeConfigEntry
|
||||||
|
from .const import (
|
||||||
|
CONF_PREFIX,
|
||||||
|
DATA_BACKUP_AGENT_LISTENERS,
|
||||||
|
DOMAIN,
|
||||||
|
METADATA_FILE_SUFFIX,
|
||||||
|
METADATA_VERSION,
|
||||||
|
)
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Cache TTL for backup list (in seconds)
|
||||||
|
CACHE_TTL = 300
|
||||||
|
|
||||||
|
|
||||||
|
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
|
||||||
|
"""Return the suggested filenames for the backup and metadata files."""
|
||||||
|
base_name = suggested_filename(backup).rsplit(".", 1)[0]
|
||||||
|
return f"{base_name}.tar", f"{base_name}.metadata.json"
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_metadata(raw_content: str) -> dict[str, Any]:
|
||||||
|
"""Parse metadata content from JSON."""
|
||||||
|
try:
|
||||||
|
data = json.loads(raw_content)
|
||||||
|
except json.JSONDecodeError as err:
|
||||||
|
raise ValueError(f"Invalid JSON format: {err}") from err
|
||||||
|
else:
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
raise TypeError("JSON content is not a dictionary")
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
def _find_backup_file_for_metadata(
|
||||||
|
metadata_filename: str, all_files: dict[str, FileVersion], prefix: str
|
||||||
|
) -> FileVersion | None:
|
||||||
|
"""Find corresponding backup file for metadata file."""
|
||||||
|
base_name = metadata_filename[len(prefix) :].removesuffix(METADATA_FILE_SUFFIX)
|
||||||
|
return next(
|
||||||
|
(
|
||||||
|
file
|
||||||
|
for name, file in all_files.items()
|
||||||
|
if name.startswith(prefix + base_name)
|
||||||
|
and name.endswith(".tar")
|
||||||
|
and name != metadata_filename
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_backup_from_metadata(
|
||||||
|
metadata_content: dict[str, Any], backup_file: FileVersion
|
||||||
|
) -> AgentBackup:
|
||||||
|
"""Construct an AgentBackup from parsed metadata content and the associated backup file."""
|
||||||
|
metadata = metadata_content["backup_metadata"]
|
||||||
|
metadata["size"] = backup_file.size
|
||||||
|
return AgentBackup.from_dict(metadata)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_b2_errors[T](
|
||||||
|
func: Callable[..., Coroutine[Any, Any, T]],
|
||||||
|
) -> Callable[..., Coroutine[Any, Any, T]]:
|
||||||
|
"""Handle B2Errors by converting them to BackupAgentError."""
|
||||||
|
|
||||||
|
@functools.wraps(func)
|
||||||
|
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||||
|
"""Catch B2Error and raise BackupAgentError."""
|
||||||
|
try:
|
||||||
|
return await func(*args, **kwargs)
|
||||||
|
except B2Error as err:
|
||||||
|
error_msg = f"Failed during {func.__name__}"
|
||||||
|
raise BackupAgentError(error_msg) from err
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
async def async_get_backup_agents(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
) -> list[BackupAgent]:
|
||||||
|
"""Return a list of backup agents for all configured Backblaze B2 entries."""
|
||||||
|
entries: list[BackblazeConfigEntry] = hass.config_entries.async_loaded_entries(
|
||||||
|
DOMAIN
|
||||||
|
)
|
||||||
|
return [BackblazeBackupAgent(hass, entry) for entry in entries]
|
||||||
|
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def async_register_backup_agents_listener(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
*,
|
||||||
|
listener: Callable[[], None],
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> Callable[[], None]:
|
||||||
|
"""Register a listener to be called when backup agents are added or removed.
|
||||||
|
|
||||||
|
:return: A function to unregister the listener.
|
||||||
|
"""
|
||||||
|
hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def remove_listener() -> None:
|
||||||
|
"""Remove the listener."""
|
||||||
|
hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
|
||||||
|
if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
|
||||||
|
hass.data.pop(DATA_BACKUP_AGENT_LISTENERS, None)
|
||||||
|
|
||||||
|
return remove_listener
|
||||||
|
|
||||||
|
|
||||||
|
class BackblazeBackupAgent(BackupAgent):
|
||||||
|
"""Backup agent for Backblaze B2 cloud storage."""
|
||||||
|
|
||||||
|
domain = DOMAIN
|
||||||
|
|
||||||
|
def __init__(self, hass: HomeAssistant, entry: BackblazeConfigEntry) -> None:
|
||||||
|
"""Initialize the Backblaze B2 agent."""
|
||||||
|
super().__init__()
|
||||||
|
self._hass = hass
|
||||||
|
self._bucket = entry.runtime_data
|
||||||
|
self._prefix = entry.data[CONF_PREFIX]
|
||||||
|
|
||||||
|
self.name = entry.title
|
||||||
|
self.unique_id = entry.entry_id
|
||||||
|
|
||||||
|
self._all_files_cache: dict[str, FileVersion] = {}
|
||||||
|
self._all_files_cache_expiration: float = 0.0
|
||||||
|
self._backup_list_cache: dict[str, AgentBackup] = {}
|
||||||
|
self._backup_list_cache_expiration: float = 0.0
|
||||||
|
|
||||||
|
self._all_files_cache_lock = asyncio.Lock()
|
||||||
|
self._backup_list_cache_lock = asyncio.Lock()
|
||||||
|
|
||||||
|
def _is_cache_valid(self, expiration_time: float) -> bool:
|
||||||
|
"""Check if cache is still valid based on expiration time."""
|
||||||
|
return time() <= expiration_time
|
||||||
|
|
||||||
|
async def _cleanup_failed_upload(self, filename: str) -> None:
|
||||||
|
"""Clean up a partially uploaded file after upload failure."""
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Attempting to delete partially uploaded main backup file %s "
|
||||||
|
"due to metadata upload failure",
|
||||||
|
filename,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
uploaded_main_file_info = await self._hass.async_add_executor_job(
|
||||||
|
self._bucket.get_file_info_by_name, filename
|
||||||
|
)
|
||||||
|
await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
|
||||||
|
except B2Error:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Failed to clean up partially uploaded main backup file %s. "
|
||||||
|
"Manual intervention may be required to delete it from Backblaze B2",
|
||||||
|
filename,
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Successfully deleted partially uploaded main backup file %s", filename
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _get_file_for_download(self, backup_id: str) -> FileVersion:
|
||||||
|
"""Get backup file for download, raising if not found."""
|
||||||
|
file, _ = await self._find_file_and_metadata_version_by_id(backup_id)
|
||||||
|
if not file:
|
||||||
|
raise BackupNotFound(f"Backup {backup_id} not found")
|
||||||
|
return file
|
||||||
|
|
||||||
|
@handle_b2_errors
|
||||||
|
async def async_download_backup(
|
||||||
|
self, backup_id: str, **kwargs: Any
|
||||||
|
) -> AsyncIterator[bytes]:
|
||||||
|
"""Download a backup from Backblaze B2."""
|
||||||
|
file = await self._get_file_for_download(backup_id)
|
||||||
|
_LOGGER.debug("Downloading %s", file.file_name)
|
||||||
|
|
||||||
|
downloaded_file = await self._hass.async_add_executor_job(file.download)
|
||||||
|
response = downloaded_file.response
|
||||||
|
|
||||||
|
async def stream_response() -> AsyncIterator[bytes]:
|
||||||
|
"""Stream the response into an AsyncIterator."""
|
||||||
|
try:
|
||||||
|
iterator = response.iter_content(chunk_size=1024 * 1024)
|
||||||
|
while True:
|
||||||
|
chunk = await self._hass.async_add_executor_job(
|
||||||
|
next, iterator, None
|
||||||
|
)
|
||||||
|
if chunk is None:
|
||||||
|
break
|
||||||
|
yield chunk
|
||||||
|
finally:
|
||||||
|
_LOGGER.debug("Finished streaming download for %s", file.file_name)
|
||||||
|
|
||||||
|
return stream_response()
|
||||||
|
|
||||||
|
@handle_b2_errors
|
||||||
|
async def async_upload_backup(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
|
||||||
|
backup: AgentBackup,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> None:
|
||||||
|
"""Upload a backup to Backblaze B2.
|
||||||
|
|
||||||
|
This involves uploading the main backup archive and a separate metadata JSON file.
|
||||||
|
"""
|
||||||
|
tar_filename, metadata_filename = suggested_filenames(backup)
|
||||||
|
prefixed_tar_filename = self._prefix + tar_filename
|
||||||
|
prefixed_metadata_filename = self._prefix + metadata_filename
|
||||||
|
|
||||||
|
metadata_content_bytes = json.dumps(
|
||||||
|
{
|
||||||
|
"metadata_version": METADATA_VERSION,
|
||||||
|
"backup_id": backup.backup_id,
|
||||||
|
"backup_metadata": backup.as_dict(),
|
||||||
|
}
|
||||||
|
).encode("utf-8")
|
||||||
|
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Uploading backup: %s, and metadata: %s",
|
||||||
|
prefixed_tar_filename,
|
||||||
|
prefixed_metadata_filename,
|
||||||
|
)
|
||||||
|
|
||||||
|
upload_successful = False
|
||||||
|
try:
|
||||||
|
await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Main backup file upload finished for %s", prefixed_tar_filename
|
||||||
|
)
|
||||||
|
|
||||||
|
_LOGGER.debug("Uploading metadata file: %s", prefixed_metadata_filename)
|
||||||
|
await self._upload_metadata_file(
|
||||||
|
metadata_content_bytes, prefixed_metadata_filename
|
||||||
|
)
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Metadata file upload finished for %s", prefixed_metadata_filename
|
||||||
|
)
|
||||||
|
upload_successful = True
|
||||||
|
finally:
|
||||||
|
if upload_successful:
|
||||||
|
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
|
||||||
|
self._invalidate_caches(
|
||||||
|
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
await self._cleanup_failed_upload(prefixed_tar_filename)
|
||||||
|
|
    def _upload_metadata_file_sync(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Synchronously upload metadata file to B2."""
        self._bucket.upload_bytes(
            metadata_content,
            filename,
            content_type="application/json",
            file_info={"metadata_only": "true"},
        )

    async def _upload_metadata_file(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Upload metadata file to B2."""
        await self._hass.async_add_executor_job(
            self._upload_metadata_file_sync,
            metadata_content,
            filename,
        )

    def _upload_unbound_stream_sync(
        self,
        reader: AsyncIteratorReader,
        filename: str,
        content_type: str,
        file_info: dict[str, Any],
    ) -> FileVersion:
        """Synchronously upload unbound stream to B2."""
        return self._bucket.upload_unbound_stream(
            reader,
            filename,
            content_type=content_type,
            file_info=file_info,
        )

    def _download_and_parse_metadata_sync(
        self, metadata_file_version: FileVersion
    ) -> dict[str, Any]:
        """Synchronously download and parse metadata file."""
        return _parse_metadata(
            metadata_file_version.download().response.content.decode("utf-8")
        )

    async def _upload_backup_file(
        self,
        filename: str,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        file_info: dict[str, Any],
    ) -> None:
        """Upload backup file to B2 using streaming."""
        _LOGGER.debug("Starting streaming upload for %s", filename)

        stream = await open_stream()
        reader = AsyncIteratorReader(self._hass.loop, stream)

        _LOGGER.debug("Uploading backup file %s with streaming", filename)
        try:
            content_type, _ = mimetypes.guess_type(filename)
            file_version = await self._hass.async_add_executor_job(
                self._upload_unbound_stream_sync,
                reader,
                filename,
                content_type or "application/x-tar",
                file_info,
            )
        finally:
            reader.close()

        _LOGGER.debug("Successfully uploaded %s (ID: %s)", filename, file_version.id_)

    @handle_b2_errors
    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup and its associated metadata file from Backblaze B2."""
        file, metadata_file = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")

        # Invariant: when file is not None, metadata_file is also not None
        assert metadata_file is not None

        _LOGGER.debug(
            "Deleting backup file: %s and metadata file: %s",
            file.file_name,
            metadata_file.file_name,
        )

        await self._hass.async_add_executor_job(file.delete)
        await self._hass.async_add_executor_job(metadata_file.delete)

        self._invalidate_caches(
            backup_id,
            file.file_name,
            metadata_file.file_name,
            remove_files=True,
        )

    @handle_b2_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List all backups by finding their associated metadata files in Backblaze B2."""
        async with self._backup_list_cache_lock:
            if self._backup_list_cache and self._is_cache_valid(
                self._backup_list_cache_expiration
            ):
                _LOGGER.debug("Returning backups from cache")
                return list(self._backup_list_cache.values())

            _LOGGER.debug(
                "Cache expired or empty, fetching all files from B2 to build backup list"
            )
            all_files_in_prefix = await self._get_all_files_in_prefix()

            _LOGGER.debug(
                "Files found in prefix '%s': %s",
                self._prefix,
                list(all_files_in_prefix.keys()),
            )

            # Process metadata files sequentially to avoid exhausting executor pool
            backups = {}
            for file_name, file_version in all_files_in_prefix.items():
                if file_name.endswith(METADATA_FILE_SUFFIX):
                    backup = await self._hass.async_add_executor_job(
                        self._process_metadata_file_sync,
                        file_name,
                        file_version,
                        all_files_in_prefix,
                    )
                    if backup:
                        backups[backup.backup_id] = backup
            self._backup_list_cache = backups
            self._backup_list_cache_expiration = time() + CACHE_TTL

            return list(backups.values())
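The listing above pairs each archive with its metadata sidecar by filename. A hypothetical helper showing the naming convention assumed here; the real lookup is done by _find_backup_file_for_metadata, defined earlier in this file.

METADATA_FILE_SUFFIX = ".metadata.json"


def tar_name_for_metadata(metadata_file_name: str) -> str:
    """Illustrative: map 'prefix/slug.metadata.json' to 'prefix/slug.tar'."""
    return metadata_file_name.removesuffix(METADATA_FILE_SUFFIX) + ".tar"


assert tar_name_for_metadata("backups/slug.metadata.json") == "backups/slug.tar"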
    @handle_b2_errors
    async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
        """Get a specific backup by its ID from Backblaze B2."""
        if self._backup_list_cache and self._is_cache_valid(
            self._backup_list_cache_expiration
        ):
            if backup := self._backup_list_cache.get(backup_id):
                _LOGGER.debug("Returning backup %s from cache", backup_id)
                return backup

        file, metadata_file_version = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file or not metadata_file_version:
            raise BackupNotFound(f"Backup {backup_id} not found")

        metadata_content = await self._hass.async_add_executor_job(
            self._download_and_parse_metadata_sync,
            metadata_file_version,
        )

        _LOGGER.debug(
            "Successfully retrieved metadata for backup ID %s from file %s",
            backup_id,
            metadata_file_version.file_name,
        )
        backup = _create_backup_from_metadata(metadata_content, file)

        if self._is_cache_valid(self._backup_list_cache_expiration):
            self._backup_list_cache[backup.backup_id] = backup

        return backup

    async def _find_file_and_metadata_version_by_id(
        self, backup_id: str
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Find the main backup file and its associated metadata file version by backup ID."""
        all_files_in_prefix = await self._get_all_files_in_prefix()

        # Process metadata files sequentially to avoid exhausting executor pool
        for file_name, file_version in all_files_in_prefix.items():
            if file_name.endswith(METADATA_FILE_SUFFIX):
                (
                    result_backup_file,
                    result_metadata_file_version,
                ) = await self._hass.async_add_executor_job(
                    self._process_metadata_file_for_id_sync,
                    file_name,
                    file_version,
                    backup_id,
                    all_files_in_prefix,
                )
                if result_backup_file and result_metadata_file_version:
                    return result_backup_file, result_metadata_file_version

        _LOGGER.debug("Backup %s not found", backup_id)
        return None, None

    def _process_metadata_file_for_id_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        target_backup_id: str,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Synchronously process a single metadata file for a specific backup ID.

        Called within a thread pool executor.
        """
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning(
                "Failed to download metadata file %s during ID search: %s",
                file_name,
                err,
            )
            return None, None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None, None

        if metadata_content["backup_id"] != target_backup_id:
            _LOGGER.debug(
                "Metadata file %s does not match target backup ID %s",
                file_name,
                target_backup_id,
            )
            return None, None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s for backup ID %s, but no corresponding backup file",
                file_name,
                target_backup_id,
            )
            return None, None

        _LOGGER.debug(
            "Found backup file %s and metadata file %s for ID %s",
            found_backup_file.file_name,
            file_name,
            target_backup_id,
        )
        return found_backup_file, file_version

    async def _get_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Get all file versions in the configured prefix from Backblaze B2.

        Uses a cache to minimize API calls.

        This fetches a flat list of all files, including main backups and metadata files.
        """
        async with self._all_files_cache_lock:
            if self._is_cache_valid(self._all_files_cache_expiration):
                _LOGGER.debug("Returning all files from cache")
                return self._all_files_cache

            _LOGGER.debug("Cache for all files expired or empty, fetching from B2")
            all_files_in_prefix = await self._hass.async_add_executor_job(
                self._fetch_all_files_in_prefix
            )
            self._all_files_cache = all_files_in_prefix
            self._all_files_cache_expiration = time() + CACHE_TTL
            return all_files_in_prefix

    def _fetch_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Fetch all files in the configured prefix from B2."""
        all_files: dict[str, FileVersion] = {}
        for file, _ in self._bucket.ls(self._prefix):
            all_files[file.file_name] = file
        return all_files

    def _process_metadata_file_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> AgentBackup | None:
        """Synchronously process a single metadata file and return an AgentBackup if valid."""
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning("Failed to download metadata file %s: %s", file_name, err)
            return None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s but no corresponding backup file",
                file_name,
            )
            return None

        _LOGGER.debug(
            "Successfully processed metadata file %s for backup ID %s",
            file_name,
            metadata_content["backup_id"],
        )
        return _create_backup_from_metadata(metadata_content, found_backup_file)

    def _invalidate_caches(
        self,
        backup_id: str,
        tar_filename: str,
        metadata_filename: str | None,
        *,
        remove_files: bool = False,
    ) -> None:
        """Invalidate caches after upload/deletion operations.

        Args:
            backup_id: The backup ID to remove from backup cache
            tar_filename: The tar filename to remove from files cache
            metadata_filename: The metadata filename to remove from files cache
            remove_files: If True, remove specific files from cache; if False, expire entire cache

        """
        if remove_files:
            if self._is_cache_valid(self._all_files_cache_expiration):
                self._all_files_cache.pop(tar_filename, None)
                if metadata_filename:
                    self._all_files_cache.pop(metadata_filename, None)

            if self._is_cache_valid(self._backup_list_cache_expiration):
                self._backup_list_cache.pop(backup_id, None)
        else:
            # For uploads, we can't easily add new FileVersion objects without API calls,
            # so we expire the entire cache for simplicity
            self._all_files_cache_expiration = 0.0
            self._backup_list_cache_expiration = 0.0
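Both caches above share one TTL pattern: a dict plus an expiration timestamp, rebuilt on miss and renewed on refresh. A minimal sketch, assuming _is_cache_valid (defined earlier in this file) simply compares the stored expiration against the wall clock; the CACHE_TTL value here is illustrative.

from time import time

CACHE_TTL = 300  # illustrative value; the real constant lives in this module


def is_cache_valid(expiration: float) -> bool:
    """Return True while the cached data has not expired yet."""
    return time() < expiration


# After a refresh, the owner stores the data and renews the deadline:
# cache_expiration = time() + CACHE_TTL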
homeassistant/components/backblaze_b2/config_flow.py (new file, 288 lines)
@@ -0,0 +1,288 @@
"""Config flow for the Backblaze B2 integration."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Mapping
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
|
||||||
|
from homeassistant.helpers import config_validation as cv
|
||||||
|
from homeassistant.helpers.selector import (
|
||||||
|
TextSelector,
|
||||||
|
TextSelectorConfig,
|
||||||
|
TextSelectorType,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .const import (
|
||||||
|
BACKBLAZE_REALM,
|
||||||
|
CONF_APPLICATION_KEY,
|
||||||
|
CONF_BUCKET,
|
||||||
|
CONF_KEY_ID,
|
||||||
|
CONF_PREFIX,
|
||||||
|
DOMAIN,
|
||||||
|
)
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Constants
|
||||||
|
REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}
|
||||||
|
|
||||||
|
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_KEY_ID): cv.string,
|
||||||
|
vol.Required(CONF_APPLICATION_KEY): TextSelector(
|
||||||
|
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
|
||||||
|
),
|
||||||
|
vol.Required(CONF_BUCKET): cv.string,
|
||||||
|
vol.Optional(CONF_PREFIX, default=""): cv.string,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||||
|
"""Handle a config flow for Backblaze B2."""
|
||||||
|
|
||||||
|
VERSION = 1
|
||||||
|
|
||||||
|
reauth_entry: ConfigEntry[Any] | None
|
||||||
|
|
||||||
|
def _abort_if_duplicate_credentials(self, user_input: dict[str, Any]) -> None:
|
||||||
|
"""Abort if credentials already exist in another entry."""
|
||||||
|
self._async_abort_entries_match(
|
||||||
|
{
|
||||||
|
CONF_KEY_ID: user_input[CONF_KEY_ID],
|
||||||
|
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_step_user(
|
||||||
|
self, user_input: dict[str, Any] | None = None
|
||||||
|
) -> ConfigFlowResult:
|
||||||
|
"""Handle a flow initiated by the user."""
|
||||||
|
errors: dict[str, str] = {}
|
||||||
|
placeholders: dict[str, str] = {}
|
||||||
|
|
||||||
|
if user_input is not None:
|
||||||
|
self._abort_if_duplicate_credentials(user_input)
|
||||||
|
|
||||||
|
errors, placeholders = await self._async_validate_backblaze_connection(
|
||||||
|
user_input
|
||||||
|
)
|
||||||
|
|
||||||
|
if not errors:
|
||||||
|
if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
|
||||||
|
"/"
|
||||||
|
):
|
||||||
|
user_input[CONF_PREFIX] += "/"
|
||||||
|
|
||||||
|
return self.async_create_entry(
|
||||||
|
title=user_input[CONF_BUCKET], data=user_input
|
||||||
|
)
|
||||||
|
|
||||||
|
return self.async_show_form(
|
||||||
|
step_id="user",
|
||||||
|
data_schema=self.add_suggested_values_to_schema(
|
||||||
|
STEP_USER_DATA_SCHEMA, user_input
|
||||||
|
),
|
||||||
|
errors=errors,
|
||||||
|
description_placeholders={"brand_name": "Backblaze B2", **placeholders},
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _async_validate_backblaze_connection(
|
||||||
|
self, user_input: dict[str, Any]
|
||||||
|
) -> tuple[dict[str, str], dict[str, str]]:
|
||||||
|
"""Validate Backblaze B2 credentials, bucket, capabilities, and prefix.
|
||||||
|
|
||||||
|
Returns a tuple of (errors_dict, placeholders_dict).
|
||||||
|
"""
|
||||||
|
errors: dict[str, str] = {}
|
||||||
|
placeholders: dict[str, str] = {}
|
||||||
|
|
||||||
|
info = InMemoryAccountInfo()
|
||||||
|
b2_api = B2Api(info)
|
||||||
|
|
||||||
|
def _authorize_and_get_bucket_sync() -> None:
|
||||||
|
"""Synchronously authorize the account and get the bucket by name.
|
||||||
|
|
||||||
|
This function is run in the executor because b2sdk operations are blocking.
|
||||||
|
"""
|
||||||
|
b2_api.authorize_account(
|
||||||
|
BACKBLAZE_REALM, # Use the defined realm constant
|
||||||
|
user_input[CONF_KEY_ID],
|
||||||
|
user_input[CONF_APPLICATION_KEY],
|
||||||
|
)
|
||||||
|
b2_api.get_bucket_by_name(user_input[CONF_BUCKET])
|
||||||
|
|
||||||
|
try:
|
||||||
|
await self.hass.async_add_executor_job(_authorize_and_get_bucket_sync)
|
||||||
|
|
||||||
|
allowed = b2_api.account_info.get_allowed()
|
||||||
|
|
||||||
|
# Check if allowed info is available
|
||||||
|
if allowed is None or not allowed.get("capabilities"):
|
||||||
|
errors["base"] = "invalid_capability"
|
||||||
|
placeholders["missing_capabilities"] = ", ".join(
|
||||||
|
sorted(REQUIRED_CAPABILITIES)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Check if all required capabilities are present
|
||||||
|
current_caps = set(allowed["capabilities"])
|
||||||
|
if not REQUIRED_CAPABILITIES.issubset(current_caps):
|
||||||
|
missing_caps = REQUIRED_CAPABILITIES - current_caps
|
||||||
|
_LOGGER.warning(
|
||||||
|
"Missing required Backblaze B2 capabilities for Key ID '%s': %s",
|
||||||
|
user_input[CONF_KEY_ID],
|
||||||
|
", ".join(sorted(missing_caps)),
|
||||||
|
)
|
||||||
|
errors["base"] = "invalid_capability"
|
||||||
|
placeholders["missing_capabilities"] = ", ".join(
|
||||||
|
sorted(missing_caps)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Only check prefix if capabilities are valid
|
||||||
|
configured_prefix: str = user_input[CONF_PREFIX]
|
||||||
|
allowed_prefix = allowed.get("namePrefix") or ""
|
||||||
|
# Ensure configured prefix starts with Backblaze B2's allowed prefix
|
||||||
|
if allowed_prefix and not configured_prefix.startswith(
|
||||||
|
allowed_prefix
|
||||||
|
):
|
||||||
|
errors[CONF_PREFIX] = "invalid_prefix"
|
||||||
|
placeholders["allowed_prefix"] = allowed_prefix
|
||||||
|
|
||||||
|
except exception.Unauthorized:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Backblaze B2 authentication failed for Key ID '%s'",
|
||||||
|
user_input[CONF_KEY_ID],
|
||||||
|
)
|
||||||
|
errors["base"] = "invalid_credentials"
|
||||||
|
except exception.RestrictedBucket as err:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Access to Backblaze B2 bucket '%s' is restricted: %s",
|
||||||
|
user_input[CONF_BUCKET],
|
||||||
|
err,
|
||||||
|
)
|
||||||
|
placeholders["restricted_bucket_name"] = err.bucket_name
|
||||||
|
errors[CONF_BUCKET] = "restricted_bucket"
|
||||||
|
except exception.NonExistentBucket:
|
||||||
|
_LOGGER.debug(
|
||||||
|
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
|
||||||
|
)
|
||||||
|
errors[CONF_BUCKET] = "invalid_bucket_name"
|
||||||
|
except exception.ConnectionReset:
|
||||||
|
_LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
|
||||||
|
errors["base"] = "cannot_connect"
|
||||||
|
except exception.MissingAccountData:
|
||||||
|
# This generally indicates an issue with how InMemoryAccountInfo is used
|
||||||
|
_LOGGER.error(
|
||||||
|
"Missing account data during Backblaze B2 authorization for Key ID '%s'",
|
||||||
|
user_input[CONF_KEY_ID],
|
||||||
|
)
|
||||||
|
errors["base"] = "invalid_credentials"
|
||||||
|
except Exception:
|
||||||
|
_LOGGER.exception(
|
||||||
|
"An unexpected error occurred during Backblaze B2 configuration for Key ID '%s'",
|
||||||
|
user_input[CONF_KEY_ID],
|
||||||
|
)
|
||||||
|
errors["base"] = "unknown"
|
||||||
|
|
||||||
|
return errors, placeholders
|
||||||
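A standalone sketch of the two prefix rules enforced by the flow above, with illustrative helper names: the configured prefix must fall under the key's allowed namePrefix, and a non-empty prefix is normalized to end with "/" before being stored.

def normalize_prefix(prefix: str) -> str:
    """Ensure a non-empty prefix ends with a slash."""
    if prefix and not prefix.endswith("/"):
        prefix += "/"
    return prefix


def prefix_allowed(configured: str, allowed_name_prefix: str | None) -> bool:
    """Check a configured prefix against the key's namePrefix restriction."""
    allowed = allowed_name_prefix or ""
    return not allowed or configured.startswith(allowed)


assert normalize_prefix("backups") == "backups/"
assert prefix_allowed("backups/ha/", "backups/")
assert not prefix_allowed("other/", "backups/")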
    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauthentication flow."""
        self.reauth_entry = self.hass.config_entries.async_get_entry(
            self.context["entry_id"]
        )
        assert self.reauth_entry is not None
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication."""
        assert self.reauth_entry is not None
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            validation_input = {
                CONF_KEY_ID: user_input[CONF_KEY_ID],
                CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                CONF_BUCKET: self.reauth_entry.data[CONF_BUCKET],
                CONF_PREFIX: self.reauth_entry.data[CONF_PREFIX],
            }

            errors, placeholders = await self._async_validate_backblaze_connection(
                validation_input
            )

            if not errors:
                return self.async_update_reload_and_abort(
                    self.reauth_entry,
                    data_updates={
                        CONF_KEY_ID: user_input[CONF_KEY_ID],
                        CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
                    },
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_KEY_ID): cv.string,
                    vol.Required(CONF_APPLICATION_KEY): TextSelector(
                        config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
                    ),
                }
            ),
            errors=errors,
            description_placeholders={
                "brand_name": "Backblaze B2",
                "bucket": self.reauth_entry.data[CONF_BUCKET],
                **placeholders,
            },
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reconfiguration flow."""
        entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
        assert entry is not None

        if user_input is not None:
            self._abort_if_duplicate_credentials(user_input)

            errors, placeholders = await self._async_validate_backblaze_connection(
                user_input
            )

            if not errors:
                if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
                    "/"
                ):
                    user_input[CONF_PREFIX] += "/"

                return self.async_update_reload_and_abort(
                    entry,
                    data_updates=user_input,
                )
        else:
            errors = {}
            placeholders = {}

        return self.async_show_form(
            step_id="reconfigure",
            data_schema=self.add_suggested_values_to_schema(
                STEP_USER_DATA_SCHEMA, user_input or entry.data
            ),
            errors=errors,
            description_placeholders={"brand_name": "Backblaze B2", **placeholders},
        )
homeassistant/components/backblaze_b2/const.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""Constants for the Backblaze B2 integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "backblaze_b2"

CONF_KEY_ID = "key_id"
CONF_APPLICATION_KEY = "application_key"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)

METADATA_FILE_SUFFIX = ".metadata.json"
METADATA_VERSION = "1"

BACKBLAZE_REALM = "production"
homeassistant/components/backblaze_b2/diagnostics.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""Diagnostics support for Backblaze B2."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant

from . import BackblazeConfigEntry
from .const import CONF_APPLICATION_KEY, CONF_KEY_ID

TO_REDACT_ENTRY_DATA = {CONF_APPLICATION_KEY, CONF_KEY_ID}
TO_REDACT_ACCOUNT_DATA_ALLOWED = {"bucketId", "bucketName", "namePrefix"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BackblazeConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    bucket = entry.runtime_data

    try:
        bucket_info = {
            "name": bucket.name,
            "id": bucket.id_,
            "type": bucket.type_,
            "cors_rules": bucket.cors_rules,
            "lifecycle_rules": bucket.lifecycle_rules,
            "revision": bucket.revision,
        }

        account_info = bucket.api.account_info
        account_data: dict[str, Any] = {
            "account_id": account_info.get_account_id(),
            "api_url": account_info.get_api_url(),
            "download_url": account_info.get_download_url(),
            "minimum_part_size": account_info.get_minimum_part_size(),
            "allowed": account_info.get_allowed(),
        }

        if isinstance(account_data["allowed"], dict):
            account_data["allowed"] = async_redact_data(
                account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
            )

    except (AttributeError, TypeError, ValueError, KeyError):
        bucket_info = {"name": "unknown", "id": "unknown"}
        account_data = {"error": "Failed to retrieve detailed account information"}

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_ENTRY_DATA),
        "entry_options": entry.options,
        "bucket_info": bucket_info,
        "account_info": account_data,
    }
homeassistant/components/backblaze_b2/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "backblaze_b2",
  "name": "Backblaze B2",
  "codeowners": ["@hugo-vrijswijk", "@ElCruncharino"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/backblaze_b2",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["b2sdk"],
  "quality_scale": "bronze",
  "requirements": ["b2sdk==2.8.1"]
}
homeassistant/components/backblaze_b2/quality_scale.yaml (new file, 124 lines)
@@ -0,0 +1,124 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: Integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: This integration does not poll.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration does not have entities.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  discovery:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  docs-data-update:
    status: exempt
    comment: This integration does not poll.
  docs-examples:
    status: exempt
    comment: The integration extends core functionality and does not require examples.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: This integration does not support physical devices.
  docs-supported-functions:
    status: exempt
    comment: This integration does not have entities.
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration does not have devices.
  entity-category:
    status: exempt
    comment: This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not have entities.
  entity-translations:
    status: exempt
    comment: This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: This integration does not use icons.
  reconfiguration-flow: done
  repair-issues: done
  stale-devices:
    status: exempt
    comment: This integration does not have devices.

  # Platinum
  async-dependency:
    status: exempt
    comment: |
      The b2sdk library is synchronous by design. All sync operations are properly
      wrapped with async_add_executor_job to prevent blocking the event loop.
  inject-websession:
    status: exempt
    comment: |
      The b2sdk library does not support custom HTTP session injection.
      It manages HTTP connections internally through its own session management.
  strict-typing:
    status: exempt
    comment: |
      The b2sdk dependency does not include a py.typed file and is not PEP 561 compliant.
      This is outside the integration's control as it's a third-party library requirement.
homeassistant/components/backblaze_b2/repairs.py (new file, 93 lines)
@@ -0,0 +1,93 @@
"""Repair issues for the Backblaze B2 integration."""

from __future__ import annotations

import logging

from b2sdk.v2.exception import (
    B2Error,
    NonExistentBucket,
    RestrictedBucket,
    Unauthorized,
)

from homeassistant.components.repairs import ConfirmRepairFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from .const import CONF_BUCKET, DOMAIN

_LOGGER = logging.getLogger(__name__)

ISSUE_BUCKET_ACCESS_RESTRICTED = "bucket_access_restricted"
ISSUE_BUCKET_NOT_FOUND = "bucket_not_found"


def _create_issue(
    hass: HomeAssistant,
    entry: ConfigEntry,
    issue_type: str,
    bucket_name: str,
) -> None:
    """Create a repair issue with standard parameters."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        f"{issue_type}_{entry.entry_id}",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.ERROR,
        translation_key=issue_type,
        translation_placeholders={
            "brand_name": "Backblaze B2",
            "title": entry.title,
            "bucket_name": bucket_name,
            "entry_id": entry.entry_id,
        },
    )


def create_bucket_access_restricted_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for restricted bucket access."""
    _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, bucket_name)


def create_bucket_not_found_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for non-existent bucket."""
    _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, bucket_name)


async def async_check_for_repair_issues(
    hass: HomeAssistant, entry: ConfigEntry
) -> None:
    """Check for common issues that require user action."""
    bucket = entry.runtime_data
    restricted_issue_id = f"{ISSUE_BUCKET_ACCESS_RESTRICTED}_{entry.entry_id}"
    not_found_issue_id = f"{ISSUE_BUCKET_NOT_FOUND}_{entry.entry_id}"

    try:
        await hass.async_add_executor_job(bucket.api.account_info.get_allowed)
        ir.async_delete_issue(hass, DOMAIN, restricted_issue_id)
        ir.async_delete_issue(hass, DOMAIN, not_found_issue_id)
    except Unauthorized:
        entry.async_start_reauth(hass)
    except RestrictedBucket as err:
        _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, err.bucket_name)
    except NonExistentBucket:
        _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, entry.data[CONF_BUCKET])
    except B2Error as err:
        _LOGGER.debug("B2 connectivity test failed: %s", err)


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> ConfirmRepairFlow:
    """Create a fix flow for Backblaze B2 issues."""
    return ConfirmRepairFlow()
homeassistant/components/backblaze_b2/strings.json (new file, 92 lines)
@@ -0,0 +1,92 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
      "invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
      "invalid_credentials": "[%key:component::backblaze_b2::exceptions::invalid_credentials::message%]",
      "invalid_prefix": "[%key:component::backblaze_b2::exceptions::invalid_prefix::message%]",
      "restricted_bucket": "[%key:component::backblaze_b2::exceptions::restricted_bucket::message%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "application_key": "Application key",
          "key_id": "Key ID"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "key_id": "Key ID to connect to {brand_name}"
        },
        "description": "Update your {brand_name} credentials for bucket {bucket}.",
        "title": "Reauthenticate {brand_name}"
      },
      "reconfigure": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Reconfigure {brand_name}"
      },
      "user": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Add {brand_name} backup"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to endpoint"
    },
    "invalid_bucket_name": {
      "message": "Bucket does not exist or is not writable by the provided credentials."
    },
    "invalid_capability": {
      "message": "Application key does not have the required read/write capabilities."
    },
    "invalid_credentials": {
      "message": "Bucket cannot be accessed using the provided key ID and application key."
    },
    "invalid_prefix": {
      "message": "Prefix is not allowed for the provided key. Must start with {allowed_prefix}."
    },
    "restricted_bucket": {
      "message": "Application key is restricted to bucket {restricted_bucket_name}."
    }
  },
  "issues": {
    "bucket_access_restricted": {
      "description": "Access to your {brand_name} bucket {bucket_name} is restricted for the current credentials. This means your application key may only have access to specific buckets, but not this one. To fix this issue:\n\n1. Log in to your {brand_name} account\n2. Check your application key restrictions\n3. Either use a different bucket that your key can access, or create a new application key with access to {bucket_name}\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket access restricted"
    },
    "bucket_not_found": {
      "description": "The {brand_name} bucket {bucket_name} cannot be found or accessed. This could mean:\n\n1. The bucket was deleted\n2. The bucket name was changed\n3. Your credentials no longer have access to this bucket\n\nTo fix this issue:\n\n1. Log in to your {brand_name} account\n2. Verify the bucket still exists and check its name\n3. Ensure your application key has access to this bucket\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket not found"
    }
  }
}
@@ -8,6 +8,6 @@
   "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.7", "securetar==2025.2.1"],
+  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
   "single_config_entry": true
 }
@@ -1,7 +1,9 @@
 """The blueprint integration."""

+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.discovery import async_load_platform
 from homeassistant.helpers.typing import ConfigType

 from . import websocket_api
@@ -28,4 +30,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the blueprint integration."""
     websocket_api.async_setup(hass)
+    hass.async_create_task(
+        async_load_platform(hass, Platform.UPDATE, DOMAIN, None, config)
+    )
     return True
@@ -204,8 +204,8 @@ class DomainBlueprints:
         self.hass = hass
         self.domain = domain
         self.logger = logger
-        self._blueprint_in_use = blueprint_in_use
-        self._reload_blueprint_consumers = reload_blueprint_consumers
+        self.blueprint_in_use = blueprint_in_use
+        self.reload_blueprint_consumers = reload_blueprint_consumers
         self._blueprints: dict[str, Blueprint | None] = {}
         self._load_lock = asyncio.Lock()
         self._blueprint_schema = blueprint_schema
@@ -325,7 +325,7 @@ class DomainBlueprints:
     async def async_remove_blueprint(self, blueprint_path: str) -> None:
         """Remove a blueprint file."""
-        if self._blueprint_in_use(self.hass, blueprint_path):
+        if self.blueprint_in_use(self.hass, blueprint_path):
             raise BlueprintInUse(self.domain, blueprint_path)
         path = self.blueprint_folder / blueprint_path
         await self.hass.async_add_executor_job(path.unlink)
@@ -362,7 +362,7 @@ class DomainBlueprints:
         self._blueprints[blueprint_path] = blueprint

         if overrides_existing:
-            await self._reload_blueprint_consumers(self.hass, blueprint_path)
+            await self.reload_blueprint_consumers(self.hass, blueprint_path)

         return overrides_existing
homeassistant/components/blueprint/update.py (new file, 293 lines)
@@ -0,0 +1,293 @@
|
|||||||
|
"""Update entities for blueprints."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import logging
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import Any, Final
|
||||||
|
|
||||||
|
from homeassistant.components import automation, script
|
||||||
|
from . import importer, models
|
||||||
|
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||||
|
from homeassistant.const import CONF_SOURCE_URL
|
||||||
|
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
|
||||||
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
|
from homeassistant.helpers import event as event_helper
|
||||||
|
from homeassistant.helpers.entity import EntityCategory
|
||||||
|
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||||
|
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||||
|
|
||||||
|
from .const import DOMAIN as BLUEPRINT_DOMAIN
|
||||||
|
from .errors import BlueprintException
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
_LATEST_VERSION_PLACEHOLDER: Final = "remote"
|
||||||
|
DATA_UPDATE_MANAGER: Final = "update_manager"
|
||||||
|
REFRESH_INTERVAL: Final = timedelta(days=1)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class BlueprintUsage:
|
||||||
|
"""Details about a blueprint currently in use."""
|
||||||
|
|
||||||
|
domain: str
|
||||||
|
path: str
|
||||||
|
domain_blueprints: models.DomainBlueprints
|
||||||
|
blueprint: models.Blueprint
|
||||||
|
entities: list[str]
|
||||||
|
|
||||||
|
|
||||||
|
async def async_setup_platform(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
config: ConfigType,
|
||||||
|
async_add_entities: AddEntitiesCallback,
|
||||||
|
discovery_info: DiscoveryInfoType | None = None,
|
||||||
|
) -> None:
|
||||||
|
"""Set up the blueprint update platform."""
|
||||||
|
data = hass.data.setdefault(BLUEPRINT_DOMAIN, {})
|
||||||
|
|
||||||
|
if (manager := data.get(DATA_UPDATE_MANAGER)) is None:
|
||||||
|
manager = BlueprintUpdateManager(hass, async_add_entities)
|
||||||
|
data[DATA_UPDATE_MANAGER] = manager
|
||||||
|
await manager.async_start()
|
||||||
|
return
|
||||||
|
|
||||||
|
manager.replace_add_entities(async_add_entities)
|
||||||
|
await manager.async_recreate_entities()
|
||||||
|
|
||||||
|
|
||||||
|
class BlueprintUpdateManager:
|
||||||
|
"""Manage blueprint update entities based on blueprint usage."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, hass: HomeAssistant, async_add_entities: AddEntitiesCallback
|
||||||
|
) -> None:
|
||||||
|
"""Initialize the manager."""
|
||||||
|
self.hass = hass
|
||||||
|
self._async_add_entities = async_add_entities
|
||||||
|
self._entities: dict[tuple[str, str], BlueprintUpdateEntity] = {}
|
||||||
|
self._lock = asyncio.Lock()
|
||||||
|
self._refresh_cancel: CALLBACK_TYPE | None = None
|
||||||
|
self._started = False
|
||||||
|
self._interval_unsub: CALLBACK_TYPE | None = None
|
||||||
|
|
||||||
|
async def async_start(self) -> None:
|
||||||
|
"""Start tracking blueprint usage."""
|
||||||
|
if self._started:
|
||||||
|
return
|
||||||
|
self._started = True
|
||||||
|
|
||||||
|
self._interval_unsub = event_helper.async_track_time_interval(
|
||||||
|
self.hass, self._handle_time_interval, REFRESH_INTERVAL
|
||||||
|
)
|
||||||
|
await self.async_refresh_entities()
|
||||||
|
|
||||||
|
def replace_add_entities(self, async_add_entities: AddEntitiesCallback) -> None:
|
||||||
|
"""Update the callback used to register entities."""
|
||||||
|
self._async_add_entities = async_add_entities
|
||||||
|
|
||||||
|
async def async_recreate_entities(self) -> None:
|
||||||
|
"""Recreate entities after the platform has been reloaded."""
|
||||||
|
async with self._lock:
|
||||||
|
entities = list(self._entities.values())
|
||||||
|
self._entities.clear()
|
||||||
|
|
||||||
|
for entity in entities:
|
||||||
|
await entity.async_remove()
|
||||||
|
|
||||||
|
await self.async_refresh_entities()
|
||||||
|
|
||||||
|
async def async_refresh_entities(self) -> None:
|
||||||
|
"""Refresh update entities based on current blueprint usage."""
|
||||||
|
async with self._lock:
|
||||||
|
usage_map = await self._async_collect_in_use_blueprints()
|
||||||
|
|
||||||
|
current_keys = set(self._entities)
|
||||||
|
new_keys = set(usage_map)
|
||||||
|
|
||||||
|
for key in current_keys - new_keys:
|
||||||
|
entity = self._entities.pop(key)
|
||||||
|
await entity.async_remove()
|
||||||
|
|
||||||
|
new_entities: list[BlueprintUpdateEntity] = []
|
||||||
|
|
||||||
|
for key in new_keys - current_keys:
|
||||||
|
usage = usage_map[key]
|
||||||
|
entity = BlueprintUpdateEntity(self, usage)
|
||||||
|
self._entities[key] = entity
|
||||||
|
new_entities.append(entity)
|
||||||
|
|
||||||
|
for key in new_keys & current_keys:
|
||||||
|
self._entities[key].update_usage(usage_map[key])
|
||||||
|
self._entities[key].async_write_ha_state()
|
||||||
|
|
||||||
|
if new_entities:
|
||||||
|
self._async_add_entities(new_entities)
|
||||||
|
|
||||||
|
def async_schedule_refresh(self) -> None:
|
||||||
|
"""Schedule an asynchronous refresh."""
|
||||||
|
if self._refresh_cancel is not None:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._refresh_cancel = event_helper.async_call_later(
|
||||||
|
self.hass, 0, self._handle_scheduled_refresh
|
||||||
|
)
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _handle_scheduled_refresh(self, _now: Any) -> None:
|
||||||
|
"""Run a scheduled refresh task."""
|
||||||
|
self._refresh_cancel = None
|
||||||
|
self.hass.async_create_task(self.async_refresh_entities())
|
||||||
|
|
||||||
|
@callback
|
||||||
|
def _handle_time_interval(self, _now: Any) -> None:
|
||||||
|
"""Handle scheduled interval refresh."""
|
||||||
|
self.async_schedule_refresh()
|
||||||
|
|
||||||
|
async def _async_collect_in_use_blueprints(self) -> dict[tuple[str, str], BlueprintUsage]:
|
||||||
|
"""Collect blueprint usage information for automations and scripts."""
|
||||||
|
|
||||||
|
usage_keys: set[tuple[str, str]] = set()
|
||||||
|
|
||||||
|
if automation.DATA_COMPONENT in self.hass.data:
|
||||||
|
component = self.hass.data[automation.DATA_COMPONENT]
|
||||||
|
for automation_entity in list(component.entities):
|
||||||
|
if (path := getattr(automation_entity, "referenced_blueprint", None)):
|
||||||
|
usage_keys.add((automation.DOMAIN, path))
|
||||||
|
|
||||||
|
if script.DOMAIN in self.hass.data:
|
||||||
|
component = self.hass.data[script.DOMAIN]
|
||||||
|
for script_entity in list(component.entities):
|
||||||
|
if (path := getattr(script_entity, "referenced_blueprint", None)):
|
||||||
|
usage_keys.add((script.DOMAIN, path))
|
||||||
|
|
||||||
|
domain_blueprints_map = self.hass.data.get(BLUEPRINT_DOMAIN, {})
|
||||||
|
usage_map: dict[tuple[str, str], BlueprintUsage] = {}
|
||||||
|
|
||||||
|
for domain, path in usage_keys:
|
||||||
|
domain_blueprints: models.DomainBlueprints | None = domain_blueprints_map.get(
|
||||||
|
domain
|
||||||
|
)
|
||||||
|
|
||||||
|
            if domain_blueprints is None:
                continue

            if not domain_blueprints.blueprint_in_use(self.hass, path):
                continue

            try:
                blueprint = await domain_blueprints.async_get_blueprint(path)
            except BlueprintException:
                continue

            source_url = blueprint.metadata.get(CONF_SOURCE_URL)
            if not source_url:
                continue

            if domain == automation.DOMAIN:
                entities = automation.automations_with_blueprint(self.hass, path)
            elif domain == script.DOMAIN:
                entities = script.scripts_with_blueprint(self.hass, path)
            else:
                entities = []

            usage_map[(domain, path)] = BlueprintUsage(
                domain=domain,
                path=path,
                domain_blueprints=domain_blueprints,
                blueprint=blueprint,
                entities=entities,
            )

        return usage_map


class BlueprintUpdateEntity(UpdateEntity):
    """Define a blueprint update entity."""

    _attr_entity_category = EntityCategory.CONFIG
    _attr_has_entity_name = True
    _attr_should_poll = False
    _attr_supported_features = UpdateEntityFeature.INSTALL

    def __init__(self, manager: BlueprintUpdateManager, usage: BlueprintUsage) -> None:
        """Initialize the update entity."""
        self._manager = manager
        self._domain = usage.domain
        self._path = usage.path
        self._domain_blueprints = usage.domain_blueprints
        self._blueprint = usage.blueprint
        self._entities_in_use = usage.entities
        self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)
        self._attr_unique_id = f"{self._domain}:{self._path}"
        self._attr_in_progress = False

        self.update_usage(usage)

    @callback
    def update_usage(self, usage: BlueprintUsage) -> None:
        """Update the entity with latest usage information."""
        self._domain_blueprints = usage.domain_blueprints
        self._blueprint = usage.blueprint
        self._entities_in_use = usage.entities
        self._source_url = usage.blueprint.metadata.get(CONF_SOURCE_URL)

        self._attr_name = usage.blueprint.name
        self._attr_release_summary = usage.blueprint.metadata.get("description")
        self._attr_installed_version = usage.blueprint.metadata.get("version")
        self._attr_release_url = self._source_url
        self._attr_available = self._source_url is not None
        self._attr_latest_version = (
            _LATEST_VERSION_PLACEHOLDER
            if self._source_url is not None
            else self._attr_installed_version
        )

    async def async_install(self, version: str | None, backup: bool) -> None:
        """Install (refresh) the blueprint from its source."""
        if self._source_url is None:
            raise HomeAssistantError("Blueprint does not define a source URL")

        self._attr_in_progress = True
        self.async_write_ha_state()
        usage: BlueprintUsage | None = None

        try:
            imported = await importer.fetch_blueprint_from_url(
                self.hass, self._source_url
            )
            blueprint = imported.blueprint

            if blueprint.domain != self._domain:
                raise HomeAssistantError(
                    "Downloaded blueprint domain does not match the existing blueprint"
                )

            await self._domain_blueprints.async_add_blueprint(
                blueprint, self._path, allow_override=True
            )

            usage = BlueprintUsage(
                domain=self._domain,
                path=self._path,
                domain_blueprints=self._domain_blueprints,
                blueprint=blueprint,
                entities=self._entities_in_use,
            )

        except HomeAssistantError:
            raise
        except Exception as err:  # noqa: BLE001 - Provide context for unexpected errors
            raise HomeAssistantError("Failed to update blueprint from source") from err
        finally:
            self._attr_in_progress = False

            if usage is not None:
                self.update_usage(usage)

            self.async_write_ha_state()

        self._manager.async_schedule_refresh()
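The entity above has no upstream version to compare against, so it leans on a placeholder: whenever a source URL is present, `_attr_latest_version` is set to `_LATEST_VERSION_PLACEHOLDER` instead of a real version. A minimal sketch of that pattern outside this integration (the sentinel value and class name are illustrative, not the integration's actual code):

from homeassistant.components.update import UpdateEntity, UpdateEntityFeature

_LATEST_VERSION_PLACEHOLDER = "re-import"  # assumed sentinel, not a real version


class AlwaysReinstallableUpdate(UpdateEntity):
    """Hypothetical entity that can always be re-installed from its source."""

    _attr_supported_features = UpdateEntityFeature.INSTALL

    def __init__(self, installed: str | None, source_url: str | None) -> None:
        self._attr_installed_version = installed
        self._attr_release_url = source_url
        # With a source URL, "latest" never equals "installed", so the update
        # shows as available and the Install button stays usable for a
        # re-import; without one, the two match and the entity reads as
        # up to date.
        self._attr_latest_version = (
            _LATEST_VERSION_PLACEHOLDER if source_url is not None else installed
        )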
@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
             return HVACAction.HEATING
         raise HomeAssistantError(
             translation_domain=DOMAIN,
-            translation_key="failed_to_parse_hvac_action",
+            translation_key="failed_to_parse_hvac_mode",
             translation_placeholders={
                 "mode_and_active": mode_and_active,
                 "current_temperature": str(self.current_temperature),
@@ -24,7 +24,7 @@
   },
   "exceptions": {
     "failed_to_parse_hvac_action": {
-      "message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
+      "message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
     },
     "failed_to_parse_hvac_mode": {
       "message": "Cannot parse response to HVACMode: {mode}"
@@ -74,11 +74,8 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
         super().__init__(data.fast_coordinator, data)
         self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate"

-        # Set temperature range if available, otherwise use Home Assistant defaults
-        if data.static.min_temp is not None and data.static.min_temp.value is not None:
-            self._attr_min_temp = data.static.min_temp.value
-        if data.static.max_temp is not None and data.static.max_temp.value is not None:
-            self._attr_max_temp = data.static.max_temp.value
+        self._attr_min_temp = data.static.min_temp.value
+        self._attr_max_temp = data.static.max_temp.value
         self._attr_temperature_unit = data.fast_coordinator.client.get_temperature_unit

     @property
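For context on the guarded version in this hunk: `data.static.min_temp` can be absent or carry a `None` value when the device does not report a range, and an unguarded read would either raise or overwrite Home Assistant's climate defaults with `None`. A tiny self-contained illustration (the `_Static` stand-in is hypothetical):

class _Static:
    """Stand-in for coordinator static data from a device with no range."""

    min_temp = None


static = _Static()

# static.min_temp.value would raise AttributeError here; the short-circuit
# guard skips the assignment and leaves the framework default in place.
if static.min_temp is not None and static.min_temp.value is not None:
    min_temp = static.min_temp.value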
@@ -7,7 +7,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["bsblan"],
-  "requirements": ["python-bsblan==3.1.1"],
+  "requirements": ["python-bsblan==3.1.0"],
   "zeroconf": [
     {
       "name": "bsb-lan*",
@@ -57,9 +57,9 @@ async def _async_reproduce_states(
     await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})

     if (
-        (state.attributes.get(ATTR_TEMPERATURE) is not None)
-        or (state.attributes.get(ATTR_TARGET_TEMP_HIGH) is not None)
-        or (state.attributes.get(ATTR_TARGET_TEMP_LOW) is not None)
+        (ATTR_TEMPERATURE in state.attributes)
+        or (ATTR_TARGET_TEMP_HIGH in state.attributes)
+        or (ATTR_TARGET_TEMP_LOW in state.attributes)
     ):
         await call_service(
             SERVICE_SET_TEMPERATURE,
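The switch from `state.attributes.get(...) is not None` to `ATTR_... in state.attributes` changes behavior only when an attribute is present with a `None` value; a two-line check makes the difference concrete:

attrs = {"temperature": None}

print(attrs.get("temperature") is not None)  # False - key treated as absent
print("temperature" in attrs)                # True  - key treated as present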
@@ -7,7 +7,7 @@ from collections.abc import Awaitable, Callable
 from datetime import datetime, timedelta
 from enum import Enum
 import logging
-from typing import Any, cast
+from typing import cast

 from hass_nabucasa import Cloud
 import voluptuous as vol
@@ -85,10 +85,6 @@ SIGNAL_CLOUD_CONNECTION_STATE: SignalType[CloudConnectionState] = SignalType(
     "CLOUD_CONNECTION_STATE"
 )

-_SIGNAL_CLOUDHOOKS_UPDATED: SignalType[dict[str, Any]] = SignalType(
-    "CLOUDHOOKS_UPDATED"
-)
-
 STARTUP_REPAIR_DELAY = 1  # 1 hour

 ALEXA_ENTITY_SCHEMA = vol.Schema(
@@ -244,24 +240,6 @@ async def async_delete_cloudhook(hass: HomeAssistant, webhook_id: str) -> None:
     await hass.data[DATA_CLOUD].cloudhooks.async_delete(webhook_id)


-@callback
-def async_listen_cloudhook_change(
-    hass: HomeAssistant,
-    webhook_id: str,
-    on_change: Callable[[dict[str, Any] | None], None],
-) -> Callable[[], None]:
-    """Listen for cloudhook changes for the given webhook and notify when modified or deleted."""
-
-    @callback
-    def _handle_cloudhooks_updated(cloudhooks: dict[str, Any]) -> None:
-        """Handle cloudhooks updated signal."""
-        on_change(cloudhooks.get(webhook_id))
-
-    return async_dispatcher_connect(
-        hass, _SIGNAL_CLOUDHOOKS_UPDATED, _handle_cloudhooks_updated
-    )
-
-
 @bind_hass
 @callback
 def async_remote_ui_url(hass: HomeAssistant) -> str:
@@ -309,7 +287,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)

-    _handle_prefs_updated(hass, cloud)
+    _remote_handle_prefs_updated(cloud)
     _setup_services(hass, prefs)

     async def async_startup_repairs(_: datetime) -> None:
@@ -393,32 +371,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:


 @callback
-def _handle_prefs_updated(hass: HomeAssistant, cloud: Cloud[CloudClient]) -> None:
-    """Register handler for cloud preferences updates."""
-    cur_remote_enabled = cloud.client.prefs.remote_enabled
-    cur_cloudhooks = cloud.client.prefs.cloudhooks
+def _remote_handle_prefs_updated(cloud: Cloud[CloudClient]) -> None:
+    """Handle remote preferences updated."""
+    cur_pref = cloud.client.prefs.remote_enabled
     lock = asyncio.Lock()

-    async def on_prefs_updated(prefs: CloudPreferences) -> None:
-        """Handle cloud preferences updates."""
-        nonlocal cur_remote_enabled
-        nonlocal cur_cloudhooks
+    # Sync remote connection with prefs
+    async def remote_prefs_updated(prefs: CloudPreferences) -> None:
+        """Update remote status."""
+        nonlocal cur_pref

-        # Lock protects cur_ state variables from concurrent updates
         async with lock:
-            if cur_cloudhooks != prefs.cloudhooks:
-                cur_cloudhooks = prefs.cloudhooks
-                async_dispatcher_send(hass, _SIGNAL_CLOUDHOOKS_UPDATED, cur_cloudhooks)
-
-            if prefs.remote_enabled == cur_remote_enabled:
+            if prefs.remote_enabled == cur_pref:
                 return

-            if cur_remote_enabled := prefs.remote_enabled:
+            if cur_pref := prefs.remote_enabled:
                 await cloud.remote.connect()
             else:
                 await cloud.remote.disconnect()

-    cloud.client.prefs.async_listen_updates(on_prefs_updated)
+    cloud.client.prefs.async_listen_updates(remote_prefs_updated)


 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
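The cloudhook listener removed above is built on Home Assistant's dispatcher helper: a sender fires a typed signal, a receiver registers a callback and keeps the returned unsubscribe function. A minimal sketch of that pattern with illustrative names:

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import (
    async_dispatcher_connect,
    async_dispatcher_send,
)
from homeassistant.util.signal_type import SignalType

SIGNAL_EXAMPLE: SignalType[dict] = SignalType("EXAMPLE_UPDATED")


@callback
def subscribe_example(hass: HomeAssistant) -> None:
    """Register a listener; call the returned function to unsubscribe."""

    @callback
    def _handle(data: dict) -> None:
        """React to the payload sent along with the signal."""

    unsub = async_dispatcher_connect(hass, SIGNAL_EXAMPLE, _handle)  # noqa: F841


@callback
def notify_example(hass: HomeAssistant, payload: dict) -> None:
    """Fan the payload out to every connected listener."""
    async_dispatcher_send(hass, SIGNAL_EXAMPLE, payload)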
@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
     }
 )
 STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
+STEP_RECONFIGURE = vol.Schema(
+    {
+        vol.Required(CONF_HOST): cv.string,
+        vol.Required(CONF_PORT): cv.port,
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
+    }
+)


 async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle reconfiguration of the device."""
         reconfigure_entry = self._get_reconfigure_entry()
+        if not user_input:
+            return self.async_show_form(
+                step_id="reconfigure", data_schema=STEP_RECONFIGURE
+            )
+
+        updated_host = user_input[CONF_HOST]
+
+        self._async_abort_entries_match({CONF_HOST: updated_host})
+
         errors: dict[str, str] = {}

-        if user_input is not None:
-            updated_host = user_input[CONF_HOST]
-
-            self._async_abort_entries_match({CONF_HOST: updated_host})
-
-            try:
-                data_to_validate = {
-                    CONF_HOST: updated_host,
-                    CONF_PORT: user_input[CONF_PORT],
-                    CONF_PIN: user_input[CONF_PIN],
-                    CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
-                }
-                await validate_input(self.hass, data_to_validate)
-            except CannotConnect:
-                errors["base"] = "cannot_connect"
-            except InvalidAuth:
-                errors["base"] = "invalid_auth"
-            except InvalidPin:
-                errors["base"] = "invalid_pin"
-            except Exception:  # noqa: BLE001
-                _LOGGER.exception("Unexpected exception")
-                errors["base"] = "unknown"
-            else:
-                data_updates = {
-                    CONF_HOST: updated_host,
-                    CONF_PORT: user_input[CONF_PORT],
-                    CONF_PIN: user_input[CONF_PIN],
-                }
-                return self.async_update_reload_and_abort(
-                    reconfigure_entry, data_updates=data_updates
-                )
-
-        schema = vol.Schema(
-            {
-                vol.Required(
-                    CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
-                ): cv.string,
-                vol.Required(
-                    CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
-                ): cv.port,
-                vol.Optional(CONF_PIN): cv.string,
-            }
-        )
-
+        try:
+            await validate_input(self.hass, user_input)
+        except CannotConnect:
+            errors["base"] = "cannot_connect"
+        except InvalidAuth:
+            errors["base"] = "invalid_auth"
+        except InvalidPin:
+            errors["base"] = "invalid_pin"
+        except Exception:  # noqa: BLE001
+            _LOGGER.exception("Unexpected exception")
+            errors["base"] = "unknown"
+        else:
+            return self.async_update_reload_and_abort(
+                reconfigure_entry, data_updates={CONF_HOST: updated_host}
+            )
+
         return self.async_show_form(
             step_id="reconfigure",
-            data_schema=schema,
+            data_schema=STEP_RECONFIGURE,
             errors=errors,
         )
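The hunk above trades a per-call schema (defaults pulled from the entry being reconfigured) for a module-level STEP_RECONFIGURE schema with static defaults. Both styles in one sketch, with hypothetical constant names:

import voluptuous as vol

from homeassistant.helpers import config_validation as cv

CONF_HOST, CONF_PORT = "host", "port"

# Module-level: defaults are fixed when the module is imported.
STATIC_STEP_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT, default=80): cv.port,
    }
)


def schema_with_entry_defaults(entry_data: dict) -> vol.Schema:
    """Per-call: pre-fill the form from the config entry's current data."""
    return vol.Schema(
        {
            vol.Required(CONF_HOST, default=entry_data[CONF_HOST]): cv.string,
            vol.Required(CONF_PORT, default=entry_data[CONF_PORT]): cv.port,
        }
    )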
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
+  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.10.28"]
 }
@@ -1,4 +0,0 @@
"""The cups component."""

DOMAIN = "cups"
CONF_PRINTERS = "printers"
@@ -1,9 +0,0 @@
{
  "domain": "cups",
  "name": "CUPS",
  "codeowners": ["@fabaff"],
  "documentation": "https://www.home-assistant.io/integrations/cups",
  "iot_class": "local_polling",
  "quality_scale": "legacy",
  "requirements": ["pycups==2.0.4"]
}
@@ -1,349 +0,0 @@
"""Details about printers which are connected to CUPS."""

from __future__ import annotations

from datetime import timedelta
import importlib
import logging
from typing import Any

import voluptuous as vol

from homeassistant.components.sensor import (
    PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
    SensorEntity,
)
from homeassistant.const import CONF_HOST, CONF_PORT, PERCENTAGE
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import CONF_PRINTERS, DOMAIN

_LOGGER = logging.getLogger(__name__)

ATTR_MARKER_TYPE = "marker_type"
ATTR_MARKER_LOW_LEVEL = "marker_low_level"
ATTR_MARKER_HIGH_LEVEL = "marker_high_level"
ATTR_PRINTER_NAME = "printer_name"
ATTR_DEVICE_URI = "device_uri"
ATTR_PRINTER_INFO = "printer_info"
ATTR_PRINTER_IS_SHARED = "printer_is_shared"
ATTR_PRINTER_LOCATION = "printer_location"
ATTR_PRINTER_MODEL = "printer_model"
ATTR_PRINTER_STATE_MESSAGE = "printer_state_message"
ATTR_PRINTER_STATE_REASON = "printer_state_reason"
ATTR_PRINTER_TYPE = "printer_type"
ATTR_PRINTER_URI_SUPPORTED = "printer_uri_supported"

CONF_IS_CUPS_SERVER = "is_cups_server"

DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 631
DEFAULT_IS_CUPS_SERVER = True

ICON_PRINTER = "mdi:printer"
ICON_MARKER = "mdi:water"

SCAN_INTERVAL = timedelta(minutes=1)

PRINTER_STATES = {3: "idle", 4: "printing", 5: "stopped"}

PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_PRINTERS): vol.All(cv.ensure_list, [cv.string]),
        vol.Optional(CONF_IS_CUPS_SERVER, default=DEFAULT_IS_CUPS_SERVER): cv.boolean,
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
    }
)


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the CUPS sensor."""
    host: str = config[CONF_HOST]
    port: int = config[CONF_PORT]
    printers: list[str] = config[CONF_PRINTERS]
    is_cups: bool = config[CONF_IS_CUPS_SERVER]

    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "CUPS",
        },
    )

    if is_cups:
        data = CupsData(host, port, None)
        data.update()
        if data.available is False:
            _LOGGER.error("Unable to connect to CUPS server: %s:%s", host, port)
            raise PlatformNotReady
        assert data.printers is not None

        dev: list[SensorEntity] = []
        for printer in printers:
            if printer not in data.printers:
                _LOGGER.error("Printer is not present: %s", printer)
                continue
            dev.append(CupsSensor(data, printer))

            if "marker-names" in data.attributes[printer]:
                dev.extend(
                    MarkerSensor(data, printer, marker, True)
                    for marker in data.attributes[printer]["marker-names"]
                )

        add_entities(dev, True)
        return

    data = CupsData(host, port, printers)
    data.update()
    if data.available is False:
        _LOGGER.error("Unable to connect to IPP printer: %s:%s", host, port)
        raise PlatformNotReady

    dev = []
    for printer in printers:
        dev.append(IPPSensor(data, printer))

        if "marker-names" in data.attributes[printer]:
            for marker in data.attributes[printer]["marker-names"]:
                dev.append(MarkerSensor(data, printer, marker, False))

    add_entities(dev, True)


class CupsSensor(SensorEntity):
    """Representation of a CUPS sensor."""

    _attr_icon = ICON_PRINTER

    def __init__(self, data: CupsData, printer_name: str) -> None:
        """Initialize the CUPS sensor."""
        self.data = data
        self._name = printer_name
        self._printer: dict[str, Any] | None = None
        self._attr_available = False

    @property
    def name(self) -> str:
        """Return the name of the entity."""
        return self._name

    @property
    def native_value(self):
        """Return the state of the sensor."""
        if self._printer is None:
            return None

        key = self._printer["printer-state"]
        return PRINTER_STATES.get(key, key)

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor."""
        if self._printer is None:
            return None

        return {
            ATTR_DEVICE_URI: self._printer["device-uri"],
            ATTR_PRINTER_INFO: self._printer["printer-info"],
            ATTR_PRINTER_IS_SHARED: self._printer["printer-is-shared"],
            ATTR_PRINTER_LOCATION: self._printer["printer-location"],
            ATTR_PRINTER_MODEL: self._printer["printer-make-and-model"],
            ATTR_PRINTER_STATE_MESSAGE: self._printer["printer-state-message"],
            ATTR_PRINTER_STATE_REASON: self._printer["printer-state-reasons"],
            ATTR_PRINTER_TYPE: self._printer["printer-type"],
            ATTR_PRINTER_URI_SUPPORTED: self._printer["printer-uri-supported"],
        }

    def update(self) -> None:
        """Get the latest data and updates the states."""
        self.data.update()
        assert self.data.printers is not None
        self._printer = self.data.printers.get(self.name)
        self._attr_available = self.data.available


class IPPSensor(SensorEntity):
    """Implementation of the IPPSensor.

    This sensor represents the status of the printer.
    """

    _attr_icon = ICON_PRINTER

    def __init__(self, data: CupsData, printer_name: str) -> None:
        """Initialize the sensor."""
        self.data = data
        self._printer_name = printer_name
        self._attributes = None
        self._attr_available = False

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._attributes["printer-make-and-model"]

    @property
    def native_value(self):
        """Return the state of the sensor."""
        if self._attributes is None:
            return None

        key = self._attributes["printer-state"]
        return PRINTER_STATES.get(key, key)

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor."""
        if self._attributes is None:
            return None

        state_attributes = {}

        if "printer-info" in self._attributes:
            state_attributes[ATTR_PRINTER_INFO] = self._attributes["printer-info"]

        if "printer-location" in self._attributes:
            state_attributes[ATTR_PRINTER_LOCATION] = self._attributes[
                "printer-location"
            ]

        if "printer-state-message" in self._attributes:
            state_attributes[ATTR_PRINTER_STATE_MESSAGE] = self._attributes[
                "printer-state-message"
            ]

        if "printer-state-reasons" in self._attributes:
            state_attributes[ATTR_PRINTER_STATE_REASON] = self._attributes[
                "printer-state-reasons"
            ]

        if "printer-uri-supported" in self._attributes:
            state_attributes[ATTR_PRINTER_URI_SUPPORTED] = self._attributes[
                "printer-uri-supported"
            ]

        return state_attributes

    def update(self) -> None:
        """Fetch new state data for the sensor."""
        self.data.update()
        self._attributes = self.data.attributes.get(self._printer_name)
        self._attr_available = self.data.available


class MarkerSensor(SensorEntity):
    """Implementation of the MarkerSensor.

    This sensor represents the percentage of ink or toner.
    """

    _attr_icon = ICON_MARKER
    _attr_native_unit_of_measurement = PERCENTAGE

    def __init__(self, data: CupsData, printer: str, name: str, is_cups: bool) -> None:
        """Initialize the sensor."""
        self.data = data
        self._attr_name = name
        self._printer = printer
        self._index = data.attributes[printer]["marker-names"].index(name)
        self._is_cups = is_cups
        self._attributes: dict[str, Any] | None = None

    @property
    def native_value(self):
        """Return the state of the sensor."""
        if self._attributes is None:
            return None

        return self._attributes[self._printer]["marker-levels"][self._index]

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor."""
        if self._attributes is None:
            return None

        high_level = self._attributes[self._printer].get("marker-high-levels")
        if isinstance(high_level, list):
            high_level = high_level[self._index]

        low_level = self._attributes[self._printer].get("marker-low-levels")
        if isinstance(low_level, list):
            low_level = low_level[self._index]

        marker_types = self._attributes[self._printer]["marker-types"]
        if isinstance(marker_types, list):
            marker_types = marker_types[self._index]

        if self._is_cups:
            printer_name = self._printer
        else:
            printer_name = self._attributes[self._printer]["printer-make-and-model"]

        return {
            ATTR_MARKER_HIGH_LEVEL: high_level,
            ATTR_MARKER_LOW_LEVEL: low_level,
            ATTR_MARKER_TYPE: marker_types,
            ATTR_PRINTER_NAME: printer_name,
        }

    def update(self) -> None:
        """Update the state of the sensor."""
        # Data fetching is done by CupsSensor/IPPSensor
        self._attributes = self.data.attributes


class CupsData:
    """Get the latest data from CUPS and update the state."""

    def __init__(self, host: str, port: int, ipp_printers: list[str] | None) -> None:
        """Initialize the data object."""
        self._host = host
        self._port = port
        self._ipp_printers = ipp_printers
        self.is_cups = ipp_printers is None
        self.printers: dict[str, dict[str, Any]] | None = None
        self.attributes: dict[str, Any] = {}
        self.available = False

    def update(self) -> None:
        """Get the latest data from CUPS."""
        cups = importlib.import_module("cups")

        try:
            conn = cups.Connection(host=self._host, port=self._port)
            if self.is_cups:
                self.printers = conn.getPrinters()
                assert self.printers is not None
                for printer in self.printers:
                    self.attributes[printer] = conn.getPrinterAttributes(name=printer)
            else:
                assert self._ipp_printers is not None
                for ipp_printer in self._ipp_printers:
                    self.attributes[ipp_printer] = conn.getPrinterAttributes(
                        uri=f"ipp://{self._host}:{self._port}/{ipp_printer}"
                    )

            self.available = True
        except RuntimeError:
            self.available = False
@@ -9,7 +9,6 @@ from homeassistant.const import CONF_ACCESS_TOKEN, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.util.ssl import get_default_context

 from .const import (
     CONF_AUTHORIZE_STRING,
@@ -32,13 +31,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: CyncConfigEntry) -> bool
         expires_at=entry.data[CONF_EXPIRES_AT],
     )
     cync_auth = Auth(async_get_clientsession(hass), user=user_info)
-    ssl_context = get_default_context()

     try:
-        cync = await Cync.create(
-            auth=cync_auth,
-            ssl_context=ssl_context,
-        )
+        cync = await Cync.create(cync_auth)
     except AuthFailedError as ex:
         raise ConfigEntryAuthFailed("User token invalid") from ex
     except CyncError as ex:
@@ -1,3 +0,0 @@
"""The decora component."""

DOMAIN = "decora"
@@ -1,166 +0,0 @@
"""Support for Decora dimmers."""

from __future__ import annotations

from collections.abc import Callable
import copy
from functools import wraps
import logging
import time
from typing import TYPE_CHECKING, Any, Concatenate

from bluepy.btle import BTLEException
import decora
import voluptuous as vol

from homeassistant import util
from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
    ColorMode,
    LightEntity,
)
from homeassistant.const import CONF_API_KEY, CONF_DEVICES, CONF_NAME
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue

from . import DOMAIN

if TYPE_CHECKING:
    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.entity_platform import AddEntitiesCallback
    from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType


_LOGGER = logging.getLogger(__name__)


def _name_validator(config):
    """Validate the name."""
    config = copy.deepcopy(config)
    for address, device_config in config[CONF_DEVICES].items():
        if CONF_NAME not in device_config:
            device_config[CONF_NAME] = util.slugify(address)

    return config


DEVICE_SCHEMA = vol.Schema(
    {vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_API_KEY): cv.string}
)

PLATFORM_SCHEMA = vol.Schema(
    vol.All(
        LIGHT_PLATFORM_SCHEMA.extend(
            {vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}}
        ),
        _name_validator,
    )
)


def retry[_DecoraLightT: DecoraLight, **_P, _R](
    method: Callable[Concatenate[_DecoraLightT, _P], _R],
) -> Callable[Concatenate[_DecoraLightT, _P], _R | None]:
    """Retry bluetooth commands."""

    @wraps(method)
    def wrapper_retry(
        device: _DecoraLightT, *args: _P.args, **kwargs: _P.kwargs
    ) -> _R | None:
        """Try send command and retry on error."""

        initial = time.monotonic()
        while True:
            if time.monotonic() - initial >= 10:
                return None
            try:
                return method(device, *args, **kwargs)
            except (decora.decoraException, AttributeError, BTLEException):
                _LOGGER.warning(
                    "Decora connect error for device %s. Reconnecting",
                    device.name,
                )
                device._switch.connect()  # noqa: SLF001

    return wrapper_retry


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up an Decora switch."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Leviton Decora",
        },
    )

    lights = []
    for address, device_config in config[CONF_DEVICES].items():
        device = {}
        device["name"] = device_config[CONF_NAME]
        device["key"] = device_config[CONF_API_KEY]
        device["address"] = address
        light = DecoraLight(device)
        lights.append(light)

    add_entities(lights)


class DecoraLight(LightEntity):
    """Representation of an Decora light."""

    _attr_color_mode = ColorMode.BRIGHTNESS
    _attr_supported_color_modes = {ColorMode.BRIGHTNESS}

    def __init__(self, device: dict[str, Any]) -> None:
        """Initialize the light."""

        self._attr_name = device["name"]
        self._attr_unique_id = device["address"]
        self._key = device["key"]
        self._switch = decora.decora(device["address"], self._key)
        self._attr_brightness = 0
        self._attr_is_on = False

    @retry
    def set_state(self, brightness: int) -> None:
        """Set the state of this lamp to the provided brightness."""
        self._switch.set_brightness(int(brightness / 2.55))
        self._attr_brightness = brightness

    @retry
    def turn_on(self, **kwargs: Any) -> None:
        """Turn the specified or all lights on."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        self._switch.on()
        self._attr_is_on = True

        if brightness is not None:
            self.set_state(brightness)

    @retry
    def turn_off(self, **kwargs: Any) -> None:
        """Turn the specified or all lights off."""
        self._switch.off()
        self._attr_is_on = False

    @retry
    def update(self) -> None:
        """Synchronise internal state with the actual light state."""
        self._attr_brightness = self._switch.get_brightness() * 2.55
        self._attr_is_on = self._switch.get_on()
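The deleted module above types its retry helper with PEP 695 type parameters (`def retry[_DecoraLightT: DecoraLight, **_P, _R](...)`). A stripped-down sketch of the same decorator shape, detached from any integration and requiring Python 3.12+:

from collections.abc import Callable
from functools import wraps
import time


def retry[**P, R](method: Callable[P, R]) -> Callable[P, R | None]:
    """Retry a flaky call for up to ten seconds, then give up with None."""

    @wraps(method)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R | None:
        start = time.monotonic()
        while True:
            if time.monotonic() - start >= 10:
                return None
            try:
                return method(*args, **kwargs)
            except RuntimeError:
                # Transient failure: loop and try again until the deadline.
                continue

    return wrapper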
@@ -1,10 +0,0 @@
{
  "domain": "decora",
  "name": "Leviton Decora",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/decora",
  "iot_class": "local_polling",
  "loggers": ["bluepy", "decora"],
  "quality_scale": "legacy",
  "requirements": ["bluepy==1.3.0", "decora==0.6"]
}
@@ -2,7 +2,6 @@

 from __future__ import annotations

-from datetime import timedelta
 import logging
 from typing import Any

@@ -26,7 +25,6 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
-from homeassistant.util import Throttle

 _LOGGER = logging.getLogger(__name__)

@@ -169,7 +167,6 @@ class DecoraWifiLight(LightEntity):
         except ValueError:
             _LOGGER.error("Failed to turn off myLeviton switch")

-    @Throttle(timedelta(seconds=30))
     def update(self) -> None:
         """Fetch new state data for this switch."""
         try:
@@ -1,3 +0,0 @@
"""The dlib_face_detect component."""

DOMAIN = "dlib_face_detect"
@@ -1,82 +0,0 @@
"""Component that will help set the Dlib face detect processing."""

from __future__ import annotations

import io

import face_recognition

from homeassistant.components.image_processing import (
    PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
    ImageProcessingFaceEntity,
)
from homeassistant.const import ATTR_LOCATION, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
from homeassistant.core import (
    DOMAIN as HOMEASSISTANT_DOMAIN,
    HomeAssistant,
    split_entity_id,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN

PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Dlib Face detection platform."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Dlib Face Detect",
        },
    )
    source: list[dict[str, str]] = config[CONF_SOURCE]
    add_entities(
        DlibFaceDetectEntity(camera[CONF_ENTITY_ID], camera.get(CONF_NAME))
        for camera in source
    )


class DlibFaceDetectEntity(ImageProcessingFaceEntity):
    """Dlib Face API entity for identify."""

    def __init__(self, camera_entity: str, name: str | None) -> None:
        """Initialize Dlib face entity."""
        super().__init__()

        self._attr_camera_entity = camera_entity

        if name:
            self._attr_name = name
        else:
            self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"

    def process_image(self, image: bytes) -> None:
        """Process image."""

        fak_file = io.BytesIO(image)
        fak_file.name = "snapshot.jpg"
        fak_file.seek(0)

        image = face_recognition.load_image_file(fak_file)
        face_locations = face_recognition.face_locations(image)

        face_locations = [{ATTR_LOCATION: location} for location in face_locations]

        self.process_faces(face_locations, len(face_locations))
@@ -1,10 +0,0 @@
{
  "domain": "dlib_face_detect",
  "name": "Dlib Face Detect",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_detect",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}
@@ -1,4 +0,0 @@
"""The dlib_face_identify component."""

CONF_FACES = "faces"
DOMAIN = "dlib_face_identify"
@@ -1,127 +0,0 @@
"""Component that will help set the Dlib face detect processing."""

from __future__ import annotations

import io
import logging

import face_recognition
import voluptuous as vol

from homeassistant.components.image_processing import (
    CONF_CONFIDENCE,
    PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
    FaceInformation,
    ImageProcessingFaceEntity,
)
from homeassistant.const import ATTR_NAME, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
from homeassistant.core import (
    DOMAIN as HOMEASSISTANT_DOMAIN,
    HomeAssistant,
    split_entity_id,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import CONF_FACES, DOMAIN

_LOGGER = logging.getLogger(__name__)


PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_FACES): {cv.string: cv.isfile},
        vol.Optional(CONF_CONFIDENCE, default=0.6): vol.Coerce(float),
    }
)


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Dlib Face detection platform."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Dlib Face Identify",
        },
    )

    confidence: float = config[CONF_CONFIDENCE]
    faces: dict[str, str] = config[CONF_FACES]
    source: list[dict[str, str]] = config[CONF_SOURCE]
    add_entities(
        DlibFaceIdentifyEntity(
            camera[CONF_ENTITY_ID],
            faces,
            camera.get(CONF_NAME),
            confidence,
        )
        for camera in source
    )


class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
    """Dlib Face API entity for identify."""

    def __init__(
        self,
        camera_entity: str,
        faces: dict[str, str],
        name: str | None,
        tolerance: float,
    ) -> None:
        """Initialize Dlib face identify entry."""

        super().__init__()

        self._attr_camera_entity = camera_entity

        if name:
            self._attr_name = name
        else:
            self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"

        self._faces = {}
        for face_name, face_file in faces.items():
            try:
                image = face_recognition.load_image_file(face_file)
                self._faces[face_name] = face_recognition.face_encodings(image)[0]
            except IndexError as err:
                _LOGGER.error("Failed to parse %s. Error: %s", face_file, err)

        self._tolerance = tolerance

    def process_image(self, image: bytes) -> None:
        """Process image."""

        fak_file = io.BytesIO(image)
        fak_file.name = "snapshot.jpg"
        fak_file.seek(0)

        image = face_recognition.load_image_file(fak_file)
        unknowns = face_recognition.face_encodings(image)

        found: list[FaceInformation] = []
        for unknown_face in unknowns:
            for name, face in self._faces.items():
                result = face_recognition.compare_faces(
                    [face], unknown_face, tolerance=self._tolerance
                )
                if result[0]:
                    found.append({ATTR_NAME: name})

        self.process_faces(found, len(unknowns))
@@ -1,10 +0,0 @@
{
  "domain": "dlib_face_identify",
  "name": "Dlib Face Identify",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_identify",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}
@@ -8,7 +8,7 @@
   "documentation": "https://www.home-assistant.io/integrations/dlna_dmr",
   "iot_class": "local_push",
   "loggers": ["async_upnp_client"],
-  "requirements": ["async-upnp-client==0.46.0", "getmac==0.9.5"],
+  "requirements": ["async-upnp-client==0.45.0", "getmac==0.9.5"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1",
@@ -7,7 +7,7 @@
   "dependencies": ["ssdp"],
   "documentation": "https://www.home-assistant.io/integrations/dlna_dms",
   "iot_class": "local_polling",
-  "requirements": ["async-upnp-client==0.46.0"],
+  "requirements": ["async-upnp-client==0.45.0"],
   "ssdp": [
     {
       "deviceType": "urn:schemas-upnp-org:device:MediaServer:1",
@@ -89,6 +89,9 @@
       }
     },
     "sensor": {
+      "auto_empty": {
+        "default": "mdi:delete-empty"
+      },
       "error": {
         "default": "mdi:alert-circle"
       },
@@ -160,6 +163,9 @@
       "advanced_mode": {
        "default": "mdi:tune"
       },
+      "border_spin": {
+        "default": "mdi:rotate-right"
+      },
       "border_switch": {
        "default": "mdi:land-fields"
       },
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
 }
@@ -17,6 +17,7 @@ from deebot_client.events import (
     NetworkInfoEvent,
     StatsEvent,
     TotalStatsEvent,
+    auto_empty,
     station,
 )
 from sucks import VacBot
@@ -158,6 +159,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.ENUM,
         options=get_options(station.State),
     ),
+    EcovacsSensorEntityDescription[auto_empty.AutoEmptyEvent](
+        capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
+        value_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
+        key="auto_empty",
+        translation_key="auto_empty",
+        device_class=SensorDeviceClass.ENUM,
+        options=get_options(auto_empty.Frequency),
+    ),
 )

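The new description above follows the integration's description-driven pattern: each entry pairs a capability lookup with a value extractor, and the platform only creates entities whose capability_fn resolves. A generic, self-contained sketch of that shape (all names illustrative, not the integration's actual classes):

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True, kw_only=True)
class DescribedSensor:
    key: str
    capability_fn: Callable[[Any], Any | None]
    value_fn: Callable[[Any], Any | None]


def supported_keys(caps: Any, descriptions: tuple[DescribedSensor, ...]) -> list[str]:
    """Keep only descriptions whose capability exists on this device."""
    return [d.key for d in descriptions if d.capability_fn(caps) is not None]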
@@ -149,6 +149,13 @@
       }
     },
     "sensor": {
+      "auto_empty": {
+        "name": "Auto-empty frequency",
+        "state": {
+          "auto": "Auto",
+          "smart": "Smart"
+        }
+      },
       "error": {
         "name": "Error",
         "state_attributes": {
@@ -231,6 +238,9 @@
       "advanced_mode": {
         "name": "Advanced mode"
       },
+      "border_spin": {
+        "name": "Border spin"
+      },
       "border_switch": {
         "name": "Border switch"
       },
@@ -99,6 +99,13 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSwitchEntityDescription, ...] = (
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.CONFIG,
     ),
+    EcovacsSwitchEntityDescription(
+        capability_fn=lambda c: c.settings.border_spin,
+        key="border_spin",
+        translation_key="border_spin",
+        entity_registry_enabled_default=False,
+        entity_category=EntityCategory.CONFIG,
+    ),
 )

@@ -1,6 +0,0 @@
"""The eddystone_temperature component."""

DOMAIN = "eddystone_temperature"
CONF_BEACONS = "beacons"
CONF_INSTANCE = "instance"
CONF_NAMESPACE = "namespace"
@@ -1,10 +0,0 @@
{
  "domain": "eddystone_temperature",
  "name": "Eddystone",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/eddystone_temperature",
  "iot_class": "local_polling",
  "loggers": ["beacontools"],
  "quality_scale": "legacy",
  "requirements": ["beacontools[scan]==2.1.0"]
}
@@ -1,211 +0,0 @@
"""Read temperature information from Eddystone beacons.

Your beacons must be configured to transmit UID (for identification) and TLM
(for temperature) frames.
"""

from __future__ import annotations

import logging

from beacontools import BeaconScanner, EddystoneFilter, EddystoneTLMFrame
import voluptuous as vol

from homeassistant.components.sensor import (
    PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
    SensorDeviceClass,
    SensorEntity,
)
from homeassistant.const import (
    CONF_NAME,
    EVENT_HOMEASSISTANT_START,
    EVENT_HOMEASSISTANT_STOP,
    STATE_UNKNOWN,
    UnitOfTemperature,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, Event, HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import CONF_BEACONS, CONF_INSTANCE, CONF_NAMESPACE, DOMAIN

_LOGGER = logging.getLogger(__name__)

CONF_BT_DEVICE_ID = "bt_device_id"


BEACON_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAMESPACE): cv.string,
        vol.Required(CONF_INSTANCE): cv.string,
        vol.Optional(CONF_NAME): cv.string,
    }
)

PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_BT_DEVICE_ID, default=0): cv.positive_int,
        vol.Required(CONF_BEACONS): vol.Schema({cv.string: BEACON_SCHEMA}),
    }
)


def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Validate configuration, create devices and start monitoring thread."""
    create_issue(
        hass,
        HOMEASSISTANT_DOMAIN,
        f"deprecated_system_packages_yaml_integration_{DOMAIN}",
        breaks_in_ha_version="2025.12.0",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=IssueSeverity.WARNING,
        translation_key="deprecated_system_packages_yaml_integration",
        translation_placeholders={
            "domain": DOMAIN,
            "integration_title": "Eddystone",
        },
    )

    bt_device_id: int = config[CONF_BT_DEVICE_ID]

    beacons: dict[str, dict[str, str]] = config[CONF_BEACONS]
    devices: list[EddystoneTemp] = []

    for dev_name, properties in beacons.items():
        namespace = get_from_conf(properties, CONF_NAMESPACE, 20)
        instance = get_from_conf(properties, CONF_INSTANCE, 12)
        name = properties.get(CONF_NAME, dev_name)

        if instance is None or namespace is None:
            _LOGGER.error("Skipping %s", dev_name)
            continue

        devices.append(EddystoneTemp(name, namespace, instance))

    if devices:
        mon = Monitor(hass, devices, bt_device_id)

        def monitor_stop(event: Event) -> None:
            """Stop the monitor thread."""
            _LOGGER.debug("Stopping scanner for Eddystone beacons")
            mon.stop()

        def monitor_start(event: Event) -> None:
            """Start the monitor thread."""
            _LOGGER.debug("Starting scanner for Eddystone beacons")
            mon.start()

        add_entities(devices)
        mon.start()
        hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
        hass.bus.listen_once(EVENT_HOMEASSISTANT_START, monitor_start)
    else:
        _LOGGER.warning("No devices were added")


def get_from_conf(config: dict[str, str], config_key: str, length: int) -> str | None:
    """Retrieve value from config and validate length."""
    string = config[config_key]
    if len(string) != length:
        _LOGGER.error(
            (
                "Error in configuration parameter %s: Must be exactly %d "
                "bytes. Device will not be added"
            ),
            config_key,
            length / 2,
        )
        return None
    return string


class EddystoneTemp(SensorEntity):
    """Representation of a temperature sensor."""

    _attr_device_class = SensorDeviceClass.TEMPERATURE
    _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
    _attr_should_poll = False

    def __init__(self, name: str, namespace: str, instance: str) -> None:
        """Initialize a sensor."""
        self._attr_name = name
        self.namespace = namespace
        self.instance = instance
        self.bt_addr = None
        self.temperature = STATE_UNKNOWN

    @property
    def native_value(self):
        """Return the state of the device."""
        return self.temperature


class Monitor:
    """Continuously scan for BLE advertisements."""

    def __init__(
        self, hass: HomeAssistant, devices: list[EddystoneTemp], bt_device_id: int
    ) -> None:
        """Construct interface object."""
        self.hass = hass

        # List of beacons to monitor
        self.devices = devices
        # Number of the bt device (hciX)
        self.bt_device_id = bt_device_id

        def callback(bt_addr, _, packet, additional_info):
            """Handle new packets."""
            self.process_packet(
                additional_info["namespace"],
                additional_info["instance"],
                packet.temperature,
            )

        device_filters = [EddystoneFilter(d.namespace, d.instance) for d in devices]

        self.scanner = BeaconScanner(
            callback, bt_device_id, device_filters, EddystoneTLMFrame
        )
        self.scanning = False

    def start(self) -> None:
        """Continuously scan for BLE advertisements."""
        if not self.scanning:
            self.scanner.start()
            self.scanning = True
        else:
            _LOGGER.debug("start() called, but scanner is already running")

    def process_packet(self, namespace, instance, temperature) -> None:
        """Assign temperature to device."""
        _LOGGER.debug(
            "Received temperature for <%s,%s>: %d", namespace, instance, temperature
        )

        for dev in self.devices:
            if (
                dev.namespace == namespace
                and dev.instance == instance
                and dev.temperature != temperature
            ):
                dev.temperature = temperature
                dev.schedule_update_ha_state()

    def stop(self) -> None:
        """Signal runner to stop and join thread."""
        if self.scanning:
            _LOGGER.debug("Stopping")
            self.scanner.stop()
            _LOGGER.debug("Stopped")
            self.scanning = False
        else:
            _LOGGER.debug("stop() called but scanner was not running")
1
homeassistant/components/enmax/__init__.py
Normal file
@@ -0,0 +1 @@
"""Virtual integration: Enmax Energy."""
6
homeassistant/components/enmax/manifest.json
Normal file
@@ -0,0 +1,6 @@
{
  "domain": "enmax",
  "name": "Enmax Energy",
  "integration_type": "virtual",
  "supported_by": "opower"
}
@@ -75,12 +75,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:

 async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:
     """Unload an esphome config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(
-        entry, entry.runtime_data.loaded_platforms
+    entry_data = await cleanup_instance(entry)
+    return await hass.config_entries.async_unload_platforms(
+        entry, entry_data.loaded_platforms
     )
-    if unload_ok:
-        await cleanup_instance(entry)
-    return unload_ok


 async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==42.4.0",
+    "aioesphomeapi==42.5.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
@@ -1,152 +0,0 @@
"""The Flick Electric integration."""

from datetime import datetime as dt
import logging
from typing import Any

import jwt
from pyflick import FlickAPI
from pyflick.authentication import SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET

from homeassistant.const import (
    CONF_ACCESS_TOKEN,
    CONF_CLIENT_ID,
    CONF_CLIENT_SECRET,
    CONF_PASSWORD,
    CONF_USERNAME,
    Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

_LOGGER = logging.getLogger(__name__)

CONF_ID_TOKEN = "id_token"

PLATFORMS = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
    """Set up Flick Electric from a config entry."""
    auth = HassFlickAuth(hass, entry)

    coordinator = FlickElectricDataCoordinator(hass, entry, FlickAPI(auth))

    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(
    hass: HomeAssistant, config_entry: FlickConfigEntry
) -> bool:
    """Migrate old entry."""
    _LOGGER.debug(
        "Migrating configuration from version %s.%s",
        config_entry.version,
        config_entry.minor_version,
    )

    if config_entry.version > 2:
        return False

    if config_entry.version == 1:
        api = FlickAPI(HassFlickAuth(hass, config_entry))

        accounts = await api.getCustomerAccounts()
        active_accounts = [
            account for account in accounts if account["status"] == "active"
        ]

        # A single active account can be auto-migrated
        if (len(active_accounts)) == 1:
            account = active_accounts[0]

            new_data = {**config_entry.data}
            new_data[CONF_ACCOUNT_ID] = account["id"]
            new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
            hass.config_entries.async_update_entry(
                config_entry,
                title=account["address"],
                unique_id=account["id"],
                data=new_data,
                version=2,
            )
            return True

        config_entry.async_start_reauth(hass, data={**config_entry.data})
        return False

    return True


class HassFlickAuth(SimpleFlickAuth):
    """Implementation of AbstractFlickAuth based on a Home Assistant entity config."""

    def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
        """Flick authentication based on a Home Assistant entity config."""
        super().__init__(
            username=entry.data[CONF_USERNAME],
            password=entry.data[CONF_PASSWORD],
            client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
            client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
            websession=aiohttp_client.async_get_clientsession(hass),
        )
        self._entry = entry
        self._hass = hass

    async def _get_entry_token(self) -> dict[str, Any]:
        # No token saved, generate one
        if (
            CONF_TOKEN_EXPIRY not in self._entry.data
            or CONF_ACCESS_TOKEN not in self._entry.data
        ):
            await self._update_token()

        # Token is expired, generate a new one
        if self._entry.data[CONF_TOKEN_EXPIRY] <= dt.now().timestamp():
            await self._update_token()

        return self._entry.data[CONF_ACCESS_TOKEN]

    async def _update_token(self):
        _LOGGER.debug("Fetching new access token")

        token = await super().get_new_token(
            self._username, self._password, self._client_id, self._client_secret
        )

        _LOGGER.debug("New token: %s", token)

        # Flick will send the same token, but expiry is relative - so grab it from the token
        token_decoded = jwt.decode(
            token[CONF_ID_TOKEN], options={"verify_signature": False}
        )

        self._hass.config_entries.async_update_entry(
            self._entry,
            data={
                **self._entry.data,
                CONF_ACCESS_TOKEN: token,
                CONF_TOKEN_EXPIRY: token_decoded["exp"],
            },
        )

    async def async_get_access_token(self):
        """Get Access Token from HASS Storage."""
        token = await self._get_entry_token()

        return token[CONF_ID_TOKEN]
@@ -1,210 +0,0 @@
"""Config Flow for Flick Electric integration."""

import asyncio
from collections.abc import Mapping
import logging
from typing import Any

from aiohttp import ClientResponseError
from pyflick import FlickAPI
from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
from pyflick.types import APIException, AuthException, CustomerAccount
import voluptuous as vol

from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_CLIENT_ID,
    CONF_CLIENT_SECRET,
    CONF_PASSWORD,
    CONF_USERNAME,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.selector import (
    SelectOptionDict,
    SelectSelector,
    SelectSelectorConfig,
    SelectSelectorMode,
)

from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN

_LOGGER = logging.getLogger(__name__)

LOGIN_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
        vol.Optional(CONF_CLIENT_ID): str,
        vol.Optional(CONF_CLIENT_SECRET): str,
    }
)


class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
    """Flick config flow."""

    VERSION = 2
    auth: AbstractFlickAuth
    accounts: list[CustomerAccount]
    data: dict[str, Any]

    async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
        self.auth = SimpleFlickAuth(
            username=user_input[CONF_USERNAME],
            password=user_input[CONF_PASSWORD],
            websession=aiohttp_client.async_get_clientsession(self.hass),
            client_id=user_input.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
            client_secret=user_input.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
        )

        try:
            async with asyncio.timeout(60):
                token = await self.auth.async_get_access_token()
        except (TimeoutError, ClientResponseError) as err:
            raise CannotConnect from err
        except AuthException as err:
            raise InvalidAuth from err

        return token is not None

    async def async_step_select_account(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Ask user to select account."""

        errors = {}
        if user_input is not None and CONF_ACCOUNT_ID in user_input:
            self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
                user_input[CONF_ACCOUNT_ID]
            )
            try:
                # Ensure supply node is active
                await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
            except (APIException, ClientResponseError):
                errors["base"] = "cannot_connect"
            except AuthException:
                # We should never get here as we have a valid token
                return self.async_abort(reason="no_permissions")
            else:
                # Supply node is active
                return await self._async_create_entry()

        try:
            self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
        except (APIException, ClientResponseError):
            errors["base"] = "cannot_connect"

        active_accounts = [a for a in self.accounts if a["status"] == "active"]

        if len(active_accounts) == 0:
            return self.async_abort(reason="no_accounts")

        if len(active_accounts) == 1:
            self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
                active_accounts[0]["id"]
            )

            return await self._async_create_entry()

        return self.async_show_form(
            step_id="select_account",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ACCOUNT_ID): SelectSelector(
                        SelectSelectorConfig(
                            options=[
                                SelectOptionDict(
                                    value=account["id"], label=account["address"]
                                )
                                for account in active_accounts
                            ],
                            mode=SelectSelectorMode.LIST,
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_user(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle gathering login info."""
        errors = {}
        if user_input is not None:
            try:
                await self._validate_auth(user_input)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                self.data = dict(user_input)
                return await self.async_step_select_account(user_input)

        return self.async_show_form(
            step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
        )

    async def async_step_reauth(
        self, user_input: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle re-authentication."""

        self.data = {**user_input}

        return await self.async_step_user(user_input)

    async def _async_create_entry(self) -> ConfigFlowResult:
        """Create an entry for the flow."""

        await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])

        account = self._get_account(self.data[CONF_ACCOUNT_ID])

        if self.source == SOURCE_REAUTH:
            # Migration completed
            if self._get_reauth_entry().version == 1:
                self.hass.config_entries.async_update_entry(
                    self._get_reauth_entry(),
                    unique_id=self.unique_id,
                    data=self.data,
                    version=self.VERSION,
                )

            return self.async_update_reload_and_abort(
                self._get_reauth_entry(),
                unique_id=self.unique_id,
                title=account["address"],
                data=self.data,
            )

        self._abort_if_unique_id_configured()

        return self.async_create_entry(
            title=account["address"],
            data=self.data,
        )

    def _get_account(self, account_id: str) -> CustomerAccount:
        """Get the account for the account ID."""
        return next(a for a in self.accounts if a["id"] == account_id)

    def _get_supply_node_ref(self, account_id: str) -> str:
        """Get the supply node ref for the account."""
        return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""


class InvalidAuth(HomeAssistantError):
    """Error to indicate there is invalid auth."""
@@ -1,12 +0,0 @@
"""Constants for the Flick Electric integration."""

DOMAIN = "flick_electric"

CONF_TOKEN_EXPIRY = "expires"
CONF_ACCOUNT_ID = "account_id"
CONF_SUPPLY_NODE_REF = "supply_node_ref"

ATTR_START_AT = "start_at"
ATTR_END_AT = "end_at"

ATTR_COMPONENTS = ["retailer", "ea", "metering", "generation", "admin", "network"]
@@ -1,55 +0,0 @@
"""Data Coordinator for Flick Electric."""

import asyncio
from datetime import timedelta
import logging

import aiohttp
from pyflick import FlickAPI, FlickPrice
from pyflick.types import APIException, AuthException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_SUPPLY_NODE_REF

_LOGGER = logging.getLogger(__name__)

SCAN_INTERVAL = timedelta(minutes=5)

type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]


class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
    """Coordinator for flick power price."""

    config_entry: FlickConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: FlickConfigEntry,
        api: FlickAPI,
    ) -> None:
        """Initialize FlickElectricDataCoordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name="Flick Electric",
            update_interval=SCAN_INTERVAL,
        )
        self.supply_node_ref = config_entry.data[CONF_SUPPLY_NODE_REF]
        self._api = api

    async def _async_update_data(self) -> FlickPrice:
        """Fetch pricing data from Flick Electric."""
        try:
            async with asyncio.timeout(60):
                return await self._api.getPricing(self.supply_node_ref)
        except AuthException as err:
            raise ConfigEntryAuthFailed from err
        except (APIException, aiohttp.ClientResponseError) as err:
            raise UpdateFailed from err
@@ -1,11 +0,0 @@
{
  "domain": "flick_electric",
  "name": "Flick Electric",
  "codeowners": ["@ZephireNZ"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/flick_electric",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["pyflick"],
  "requirements": ["PyFlick==1.1.3"]
}
@@ -1,72 +0,0 @@
"""Support for Flick Electric Pricing data."""

from datetime import timedelta
from decimal import Decimal
import logging
from typing import Any

from homeassistant.components.sensor import SensorEntity
from homeassistant.const import CURRENCY_CENT, UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ATTR_COMPONENTS, ATTR_END_AT, ATTR_START_AT
from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=5)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: FlickConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Flick Sensor Setup."""
    coordinator = entry.runtime_data

    async_add_entities([FlickPricingSensor(coordinator)])


class FlickPricingSensor(CoordinatorEntity[FlickElectricDataCoordinator], SensorEntity):
    """Entity object for Flick Electric sensor."""

    _attr_attribution = "Data provided by Flick Electric"
    _attr_native_unit_of_measurement = f"{CURRENCY_CENT}/{UnitOfEnergy.KILO_WATT_HOUR}"
    _attr_has_entity_name = True
    _attr_translation_key = "power_price"

    def __init__(self, coordinator: FlickElectricDataCoordinator) -> None:
        """Entity object for Flick Electric sensor."""
        super().__init__(coordinator)

        self._attr_unique_id = f"{coordinator.supply_node_ref}_pricing"

    @property
    def native_value(self) -> Decimal:
        """Return the state of the sensor."""
        # The API should return a unit price with quantity of 1.0 when no start/end time is provided
        if self.coordinator.data.quantity != 1:
            _LOGGER.warning(
                "Unexpected quantity for unit price: %s", self.coordinator.data
            )
        return self.coordinator.data.cost * 100

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes."""
        components: dict[str, float] = {}

        for component in self.coordinator.data.components:
            if component.charge_setter not in ATTR_COMPONENTS:
                _LOGGER.warning("Found unknown component: %s", component.charge_setter)
                continue

            components[component.charge_setter] = float(component.value * 100)

        return {
            ATTR_START_AT: self.coordinator.data.start_at,
            ATTR_END_AT: self.coordinator.data.end_at,
            **components,
        }
@@ -1,39 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "no_accounts": "No services are active on this Flick account",
      "no_permissions": "Cannot get pricing for this account. Please check user permissions.",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "select_account": {
        "data": {
          "account_id": "Account"
        },
        "title": "Select account"
      },
      "user": {
        "data": {
          "client_id": "Client ID (optional)",
          "client_secret": "Client Secret (optional)",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        },
        "title": "Flick Login Credentials"
      }
    }
  },
  "entity": {
    "sensor": {
      "power_price": {
        "name": "Flick power price"
      }
    }
  }
}
@@ -3,7 +3,6 @@
 from __future__ import annotations

 import asyncio
-from urllib.parse import quote

 import voluptuous as vol

@@ -153,9 +152,7 @@ class HassFoscamCamera(FoscamEntity, Camera):
     async def stream_source(self) -> str | None:
         """Return the stream source."""
         if self._rtsp_port:
-            _username = quote(self._username)
-            _password = quote(self._password)
-            return f"rtsp://{_username}:{_password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"
+            return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"

         return None
@@ -37,7 +37,6 @@ class FoscamDeviceInfo:
     supports_speak_volume_adjustment: bool
     supports_pet_adjustment: bool
     supports_car_adjustment: bool
-    supports_human_adjustment: bool
     supports_wdr_adjustment: bool
     supports_hdr_adjustment: bool
@@ -116,28 +115,20 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
         is_open_wdr = None
         is_open_hdr = None
         reserve3 = product_info.get("reserve4")
-        model = product_info.get("model")
-        model_int = int(model) if model is not None else 7002
-        if model_int > 7001:
-            reserve3_int = int(reserve3) if reserve3 is not None else 0
-            supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
-            supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
-            if supports_wdr_adjustment_val:
-                ret_wdr, is_open_wdr_data = self.session.getWdrMode()
-                mode = (
-                    is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
-                )
-                is_open_wdr = bool(int(mode))
-            elif supports_hdr_adjustment_val:
-                ret_hdr, is_open_hdr_data = self.session.getHdrMode()
-                mode = (
-                    is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
-                )
-                is_open_hdr = bool(int(mode))
-        else:
-            supports_wdr_adjustment_val = False
-            supports_hdr_adjustment_val = False
+        reserve3_int = int(reserve3) if reserve3 is not None else 0
+        supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
+        supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
+        if supports_wdr_adjustment_val:
+            ret_wdr, is_open_wdr_data = self.session.getWdrMode()
+            mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
+            is_open_wdr = bool(int(mode))
+        elif supports_hdr_adjustment_val:
+            ret_hdr, is_open_hdr_data = self.session.getHdrMode()
+            mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
+            is_open_hdr = bool(int(mode))
         ret_sw, software_capabilities = self.session.getSWCapabilities()

         supports_speak_volume_adjustment_val = (
             bool(int(software_capabilities.get("swCapabilities1")) & 32)
             if ret_sw == 0
@@ -153,32 +144,24 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
             if ret_sw == 0
             else False
         )
-        human_adjustment_val = (
-            bool(int(software_capabilities.get("swCapabilities2")) & 128)
-            if ret_sw == 0
-            else False
-        )
-        ret_md, motion_config_val = self.session.get_motion_detect_config()
+        ret_md, mothion_config_val = self.session.get_motion_detect_config()
         if pet_adjustment_val:
             is_pet_detection_on_val = (
-                motion_config_val.get("petEnable") == "1" if ret_md == 0 else False
+                mothion_config_val["petEnable"] == "1" if ret_md == 0 else False
             )
         else:
             is_pet_detection_on_val = False

         if car_adjustment_val:
             is_car_detection_on_val = (
-                motion_config_val.get("carEnable") == "1" if ret_md == 0 else False
+                mothion_config_val["carEnable"] == "1" if ret_md == 0 else False
             )
         else:
             is_car_detection_on_val = False

-        if human_adjustment_val:
-            is_human_detection_on_val = (
-                motion_config_val.get("humanEnable") == "1" if ret_md == 0 else False
-            )
-        else:
-            is_human_detection_on_val = False
+        is_human_detection_on_val = (
+            mothion_config_val["humanEnable"] == "1" if ret_md == 0 else False
+        )

         return FoscamDeviceInfo(
             dev_info=dev_info,
@@ -196,7 +179,6 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
             supports_speak_volume_adjustment=supports_speak_volume_adjustment_val,
             supports_pet_adjustment=pet_adjustment_val,
             supports_car_adjustment=car_adjustment_val,
-            supports_human_adjustment=human_adjustment_val,
             supports_hdr_adjustment=supports_hdr_adjustment_val,
             supports_wdr_adjustment=supports_wdr_adjustment_val,
             is_open_wdr=is_open_wdr,
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/foscam",
   "iot_class": "local_polling",
   "loggers": ["libpyfoscamcgi"],
-  "requirements": ["libpyfoscamcgi==0.0.9"]
+  "requirements": ["libpyfoscamcgi==0.0.8"]
 }
@@ -143,7 +143,6 @@ SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
         native_value_fn=lambda data: data.is_human_detection_on,
         turn_off_fn=lambda session: set_motion_detection(session, "humanEnable", False),
         turn_on_fn=lambda session: set_motion_detection(session, "humanEnable", True),
-        exists_fn=lambda coordinator: coordinator.data.supports_human_adjustment,
     ),
 ]
@@ -21,6 +21,9 @@ from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity
 from .model import FritzEntityDescriptionMixinBase

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 @dataclass(frozen=True, kw_only=True)
 class FritzBinarySensorEntityDescription(
@@ -11,6 +11,9 @@ from .const import DOMAIN
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxEntity

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -23,6 +23,9 @@ from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
 from .entity import FritzBoxDeviceEntity
 from .sensor import value_scheduled_preset

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+
 HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF]
 PRESET_HOLIDAY = "holiday"
 PRESET_SUMMER = "summer"
@@ -16,6 +16,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -18,6 +18,9 @@ from .const import COLOR_MODE, LOGGER
 from .coordinator import FritzboxConfigEntry, FritzboxDataUpdateCoordinator
 from .entity import FritzBoxDeviceEntity

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -34,6 +34,9 @@ from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity
 from .model import FritzEntityDescriptionMixinBase

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 @dataclass(frozen=True, kw_only=True)
 class FritzSensorEntityDescription(
@@ -13,6 +13,9 @@ from .const import DOMAIN
 from .coordinator import FritzboxConfigEntry
 from .entity import FritzBoxDeviceEntity

+# Coordinator handles data updates, so we can allow unlimited parallel updates
+PARALLEL_UPDATES = 0
+

 async def async_setup_entry(
     hass: HomeAssistant,
@@ -453,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     hass.http.app.router.register_resource(IndexView(repo_path, hass))

     async_register_built_in_panel(hass, "light")
-    async_register_built_in_panel(hass, "security")
+    async_register_built_in_panel(hass, "safety")
     async_register_built_in_panel(hass, "climate")

     async_register_built_in_panel(hass, "profile")
@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20251105.1"]
+  "requirements": ["home-assistant-frontend==20251029.1"]
 }
@@ -60,6 +60,35 @@ from .server import Server
 _LOGGER = logging.getLogger(__name__)

 _FFMPEG = "ffmpeg"
+_SUPPORTED_STREAMS = frozenset(
+    (
+        "bubble",
+        "dvrip",
+        "expr",
+        _FFMPEG,
+        "gopro",
+        "homekit",
+        "http",
+        "https",
+        "httpx",
+        "isapi",
+        "ivideon",
+        "kasa",
+        "nest",
+        "onvif",
+        "roborock",
+        "rtmp",
+        "rtmps",
+        "rtmpx",
+        "rtsp",
+        "rtsps",
+        "rtspx",
+        "tapo",
+        "tcp",
+        "webrtc",
+        "webtorrent",
+    )
+)

 CONFIG_SCHEMA = vol.Schema(
     {
@@ -168,7 +197,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: Go2RtcConfigEntry) -> bool:
         return False

     provider = entry.runtime_data = WebRTCProvider(hass, url, session, client)
-    await provider.initialize()
     entry.async_on_unload(async_register_webrtc_provider(hass, provider))
     return True
@@ -200,21 +228,16 @@ class WebRTCProvider(CameraWebRTCProvider):
         self._session = session
         self._rest_client = rest_client
         self._sessions: dict[str, Go2RtcWsClient] = {}
-        self._supported_schemes: set[str] = set()

     @property
     def domain(self) -> str:
         """Return the integration domain of the provider."""
         return DOMAIN

-    async def initialize(self) -> None:
-        """Initialize the provider."""
-        self._supported_schemes = await self._rest_client.schemes.list()
-
     @callback
     def async_is_supported(self, stream_source: str) -> bool:
         """Return if this provider is supports the Camera as source."""
-        return stream_source.partition(":")[0] in self._supported_schemes
+        return stream_source.partition(":")[0] in _SUPPORTED_STREAMS

     async def async_handle_async_webrtc_offer(
         self,
@@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
 DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
 HA_MANAGED_API_PORT = 11984
 HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
-RECOMMENDED_VERSION = "1.9.12"
+RECOMMENDED_VERSION = "1.9.11"
@@ -8,6 +8,6 @@
   "integration_type": "system",
   "iot_class": "local_polling",
   "quality_scale": "internal",
-  "requirements": ["go2rtc-client==0.3.0"],
+  "requirements": ["go2rtc-client==0.2.1"],
   "single_config_entry": true
 }
@@ -29,18 +29,8 @@ _RESPAWN_COOLDOWN = 1
 _GO2RTC_CONFIG_FORMAT = r"""# This file is managed by Home Assistant
 # Do not edit it manually

-app:
-  modules: {app_modules}
-
 api:
   listen: "{api_ip}:{api_port}"
-  allow_paths: {api_allow_paths}
-
-# ffmpeg needs the exec module
-# Restrict execution to only ffmpeg binary
-exec:
-  allow_paths:
-    - ffmpeg

 rtsp:
   listen: "127.0.0.1:18554"
@@ -50,43 +40,6 @@ webrtc:
   ice_servers: []
 """

-_APP_MODULES = (
-    "api",
-    "exec",  # Execution module for ffmpeg
-    "ffmpeg",
-    "http",
-    "mjpeg",
-    "onvif",
-    "rtmp",
-    "rtsp",
-    "srtp",
-    "webrtc",
-    "ws",
-)
-
-_API_ALLOW_PATHS = (
-    "/",  # UI static page and version control
-    "/api",  # Main API path
-    "/api/frame.jpeg",  # Snapshot functionality
-    "/api/schemes",  # Supported stream schemes
-    "/api/streams",  # Stream management
-    "/api/webrtc",  # Webrtc functionality
-    "/api/ws",  # Websocket functionality (e.g. webrtc candidates)
-)
-
-# Additional modules when UI is enabled
-_UI_APP_MODULES = (
-    *_APP_MODULES,
-    "debug",
-)
-# Additional api paths when UI is enabled
-_UI_API_ALLOW_PATHS = (
-    *_API_ALLOW_PATHS,
-    "/api/config",  # UI config view
-    "/api/log",  # UI log view
-    "/api/streams.dot",  # UI network view
-)
-
 _LOG_LEVEL_MAP = {
     "TRC": logging.DEBUG,
     "DBG": logging.DEBUG,
@@ -108,34 +61,14 @@ class Go2RTCWatchdogError(HomeAssistantError):
     """Raised on watchdog error."""


-def _format_list_for_yaml(items: tuple[str, ...]) -> str:
-    """Format a list of strings for yaml config."""
-    if not items:
-        return "[]"
-    formatted_items = ",".join(f'"{item}"' for item in items)
-    return f"[{formatted_items}]"
-
-
-def _create_temp_file(enable_ui: bool) -> str:
+def _create_temp_file(api_ip: str) -> str:
     """Create temporary config file."""
-    app_modules: tuple[str, ...] = _APP_MODULES
-    api_paths: tuple[str, ...] = _API_ALLOW_PATHS
-    api_ip = _LOCALHOST_IP
-    if enable_ui:
-        app_modules = _UI_APP_MODULES
-        api_paths = _UI_API_ALLOW_PATHS
-        # Listen on all interfaces for allowing access from all ips
-        api_ip = ""
-
     # Set delete=False to prevent the file from being deleted when the file is closed
     # Linux is clearing tmp folder on reboot, so no need to delete it manually
     with NamedTemporaryFile(prefix="go2rtc_", suffix=".yaml", delete=False) as file:
         file.write(
             _GO2RTC_CONFIG_FORMAT.format(
-                api_ip=api_ip,
-                api_port=HA_MANAGED_API_PORT,
-                app_modules=_format_list_for_yaml(app_modules),
-                api_allow_paths=_format_list_for_yaml(api_paths),
+                api_ip=api_ip, api_port=HA_MANAGED_API_PORT
             ).encode()
         )
     return file.name
@@ -153,7 +86,10 @@ class Server:
         self._log_buffer: deque[str] = deque(maxlen=_LOG_BUFFER_SIZE)
         self._process: asyncio.subprocess.Process | None = None
        self._startup_complete = asyncio.Event()
-        self._enable_ui = enable_ui
+        self._api_ip = _LOCALHOST_IP
+        if enable_ui:
+            # Listen on all interfaces for allowing access from all ips
+            self._api_ip = ""
         self._watchdog_task: asyncio.Task | None = None
         self._watchdog_tasks: list[asyncio.Task] = []
@@ -168,7 +104,7 @@
         """Start the server."""
         _LOGGER.debug("Starting go2rtc server")
         config_file = await self._hass.async_add_executor_job(
-            _create_temp_file, self._enable_ui
+            _create_temp_file, self._api_ip
         )

         self._startup_complete.clear()
@@ -136,21 +136,6 @@ async def async_setup_entry(
         new_data[CONF_URL] = url
         hass.config_entries.async_update_entry(config_entry, data=new_data)

-    # Migrate legacy config entries without auth_type field
-    if CONF_AUTH_TYPE not in config:
-        new_data = dict(config_entry.data)
-        # Detect auth type based on which fields are present
-        if CONF_TOKEN in config:
-            new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
-        elif CONF_USERNAME in config:
-            new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
-        else:
-            raise ConfigEntryError(
-                "Unable to determine authentication type from config entry."
-            )
-        hass.config_entries.async_update_entry(config_entry, data=new_data)
-        config = config_entry.data
-
     # Determine API version
     if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
         api_version = "v1"
@@ -1,3 +0,0 @@
"""The gstreamer component."""

DOMAIN = "gstreamer"
Some files were not shown because too many files have changed in this diff