Compare commits

..

100 Commits

Author SHA1 Message Date
Franck Nijhof
40d7f2a89e 2025.10.2 (#154181) 2025-10-10 23:19:19 +02:00
Shay Levy
13b717e2da Fix shelly remove orphaned entities (#154182)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-10 22:46:30 +02:00
Franck Nijhof
5fcfd3ad84 Bump version to 2025.10.2 2025-10-10 20:29:17 +00:00
Shay Levy
324a7b5443 Fix Shelly RPC cover update when the device is not initialized (#154159) 2025-10-10 20:27:30 +00:00
Robert Resch
491ae8f72c Bump deebot-client to 15.1.0 (#154154) 2025-10-10 20:23:10 +00:00
Justus
259247892f IOmeter bump version v0.2.0 (#154150) 2025-10-10 20:23:09 +00:00
Bram Kragten
caeda0ef64 Update frontend to 20251001.2 (#154143) 2025-10-10 20:23:08 +00:00
Paul Bottein
df35c535e4 Add missing entity category and icons for smlight integration (#154131) 2025-10-10 20:23:07 +00:00
Paulus Schoutsen
f93b9e0ed0 Z-Wave: ESPHome discovery to update all options (#154113) 2025-10-10 20:23:05 +00:00
peteS-UK
48a3372cf2 Fix for multiple Lyrion Music Server on a single Home Assistant server for Squeezebox (#154081) 2025-10-10 20:23:04 +00:00
Maciej Bieniek
d84fd72428 Bump brother to version 5.1.1 (#154080) 2025-10-10 20:23:03 +00:00
Simone Chemelli
e8cb386962 Bump aioamazondevices to 6.4.0 (#154071) 2025-10-10 20:23:02 +00:00
epenet
5ac726703c Filter out invalid Renault vehicles (#154070)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-10 20:23:00 +00:00
Joost Lekkerkerker
688649a799 Don't mark ZHA coordinator as via_device with itself (#154004) 2025-10-10 20:17:07 +00:00
Artur Pragacz
c5359ade3e Fix empty llm api list in chat log (#153996) 2025-10-10 20:17:05 +00:00
Michael Davie
4e60dedc1b Bump env-canada to 0.11.3 (#153967) 2025-10-10 20:17:04 +00:00
Maciej Bieniek
221d74f83a Fix update interval for AccuWeather hourly forecast (#153957) 2025-10-10 20:17:02 +00:00
G Johansson
fbbb3d6415 Bump holidays to 0.82 (#153952) 2025-10-10 20:17:01 +00:00
Josef Zweck
8297019011 Bump pylamarzocco to 2.1.2 (#153950) 2025-10-10 20:16:59 +00:00
TheJulianJES
61715dcff3 Adjust OTBR config entry name for ZBT-2 (#153940) 2025-10-10 20:16:58 +00:00
TheJulianJES
32b822ee99 Fix HA hardware configuration message for Thread without HAOS (#153933) 2025-10-10 20:16:56 +00:00
Fabien Kleinbourg
e6c2e0ad80 sharkiq dependency bump to 1.4.2 (#153931) 2025-10-10 20:16:55 +00:00
TheJulianJES
1314427dc5 Do not auto-set up ZHA zeroconf discoveries during onboarding (#153914) 2025-10-10 20:16:53 +00:00
Tom Matheussen
bf499a45f7 Add missing translation string for Satel Integra subentry type (#153905) 2025-10-10 20:16:52 +00:00
Christopher Fenner
b955e22628 fix typo in icon assignment of AccuWeather integration (#153890) 2025-10-10 20:16:50 +00:00
Simone Chemelli
1b222ff5fd Fix restore cover state for Comelit SimpleHome (#153887) 2025-10-10 20:16:49 +00:00
derytive
f0510e703f Add plate_count for Miele KM7575 (#153868) 2025-10-10 19:36:03 +00:00
G Johansson
cbe3956e15 Handle timeout errors gracefully in Nord Pool services (#153856) 2025-10-10 19:36:01 +00:00
Aaron Bach
4588e9da8d Limit SimpliSafe websocket connection attempts during startup (#153853)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-10 19:36:00 +00:00
Simone Chemelli
5445890fdf Bump aiocomelit to 1.1.1 (#153843) 2025-10-10 19:35:59 +00:00
Simone Chemelli
9b49f77f86 Fix PIN validation for Comelit SimpleHome (#153840) 2025-10-10 19:35:57 +00:00
Petro31
566c8fb786 Fix delay_on and auto_off with multiple triggers (#153839) 2025-10-10 19:35:56 +00:00
Joost Lekkerkerker
b36150c213 Add motion presets to SmartThings AC (#153830) 2025-10-10 19:35:54 +00:00
Joost Lekkerkerker
809070d2ad Catch update exception in AirGradient (#153828) 2025-10-10 19:35:53 +00:00
Joost Lekkerkerker
f4339dc031 Bump pySmartThings to 3.3.1 (#153826) 2025-10-10 19:35:51 +00:00
epenet
f3b37d24b0 Fix Tuya cover position when only control is available (#153803) 2025-10-10 19:35:50 +00:00
Paulus Schoutsen
4c8348caa7 Handle ESPHome discoveries with uninitialized Z-Wave antennas (#153790) 2025-10-10 19:35:49 +00:00
cdnninja
b9e7c102ea vesync correct fan set modes (#153761) 2025-10-10 19:35:47 +00:00
Simone Chemelli
69d9fa89b7 Remove stale entities from Alexa Devices (#153759) 2025-10-10 19:35:46 +00:00
Simone Chemelli
6f3f5a5ec1 Bump aioamazondevices to 6.2.9 (#153756) 2025-10-10 19:35:44 +00:00
Simone Chemelli
5ecfeca90a Fix sensors availability check for Alexa Devices (#153743) 2025-10-10 19:35:43 +00:00
Sander Jochems
00e0570fd4 Upgrade python-melcloud to 0.1.2 (#153742) 2025-10-10 19:35:41 +00:00
Øyvind Matheson Wergeland
5a5b94f3af Synology DSM: Don't reinitialize API during configuration (#153739) 2025-10-10 19:35:40 +00:00
Maciej Bieniek
34f00d9b33 Align Shelly presencezone entity to the new API/firmware (#153737) 2025-10-10 19:35:39 +00:00
Tom
4cabc5b368 Bump airOS to 0.5.5 using formdata for v6 firmware (#153736) 2025-10-10 19:35:37 +00:00
tronikos
4045125422 Fix missing google_assistant_sdk.send_text_command (#153735) 2025-10-10 19:35:36 +00:00
Fredrik Erlandsson
d7393af76f Version bump pydaikin to 2.17.1 (#153726) 2025-10-10 19:35:34 +00:00
Fredrik Erlandsson
ad41386b27 Version bump pydaikin to 2.17.0 (#153718) 2025-10-10 19:35:33 +00:00
tronikos
62d17ea20c Bump opower to 0.15.6 (#153714) 2025-10-10 19:35:31 +00:00
peetersch
c4954731d0 Modbus Fix message_wait_milliseconds is no longer applied (#153709) 2025-10-10 19:35:30 +00:00
cdnninja
647723d3f0 Bump pyvesync to 3.1.0 (#153693) 2025-10-10 19:35:28 +00:00
Christopher Fenner
51c500e22c Fix ViCare pressure sensors missing unit of measurement (#153691) 2025-10-10 19:35:26 +00:00
Denis Shulyaka
f6fc13c1f2 Gemini: Use default model instead of recommended where applicable (#153676) 2025-10-10 19:35:25 +00:00
Jan Bouwhuis
0009a7a042 Fix MQTT Lock state reset to unknown when a reset payload is received (#153647) 2025-10-10 19:35:24 +00:00
Luke Lashley
a3d1aa28e7 Switch Roborock to v4 of the code login api (#153593) 2025-10-10 19:35:22 +00:00
Simone Chemelli
9f53eb9b76 Bump aioamazondevices to 6.2.8 (#153592) 2025-10-10 19:35:21 +00:00
Luke Lashley
f53a205ff3 Bump python-roborock to 2.50.2 (#153561) 2025-10-10 19:35:19 +00:00
NANI
d08517c3df Updated VRM client and accounted for missing forecasts (#153464) 2025-10-10 19:35:18 +00:00
Kinachi249
d7398a44a1 Bump PyCync to 0.4.1 (#153401) 2025-10-10 19:35:17 +00:00
Aidan Timson
9acfc0cb88 Fix power device classes for system bridge (#153201) 2025-10-10 19:35:15 +00:00
Hessel
1b3d21523a Wallbox fix Rate Limit issue for multiple chargers (#153074) 2025-10-10 19:35:14 +00:00
puddly
1d407d1326 Prevent reloading the ZHA integration while adapter firmware is being updated (#152626) 2025-10-10 19:35:12 +00:00
Franck Nijhof
013346cead 2025.10.1 (#153582) 2025-10-03 20:08:44 +02:00
Franck Nijhof
5abaabc9da Bump version to 2025.10.1 2025-10-03 17:26:37 +00:00
Paulus Schoutsen
32481312c3 When discovering a Z-Wave adapter, always configure add-on in config flow (#153575) 2025-10-03 17:26:16 +00:00
Paulus Schoutsen
bdc9eb37d3 Z-Wave to support migrating from USB to socket with same home ID (#153522) 2025-10-03 17:26:15 +00:00
Abílio Costa
e0afcbc02b Debounce updates in Idasen Desk (#153503) 2025-10-03 17:26:13 +00:00
puddly
cd56a6a98d Bump universal-silabs-flasher to 0.0.35 (#153500) 2025-10-03 17:26:11 +00:00
cdnninja
9d85893bbb Fix VeSync zero fan speed handling (#153493)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-10-03 17:26:10 +00:00
starkillerOG
9e8a70225f Bump reolink-aio to 0.16.1 (#153489) 2025-10-03 17:26:08 +00:00
Daniel Hjelseth Høyer
96ec795d5e Bump pyTibber to 0.32.2 (#153484) 2025-10-03 17:26:07 +00:00
Josef Zweck
65b796070d Fix missing parameter pass in onedrive (#153478) 2025-10-03 17:26:05 +00:00
Aidan Timson
32994812e5 Update OVOEnergy to 3.0.1 (#153476) 2025-10-03 17:26:04 +00:00
G Johansson
66ff9d63a3 Fix next event in workday calendar (#153465) 2025-10-03 17:26:02 +00:00
Joost Lekkerkerker
b2a63d4996 Add translation for turbo fan mode in SmartThings (#153445)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-03 17:26:00 +00:00
puddly
f9f37b7f2a Disable baudrate bootloader reset for ZBT-2 (#153443) 2025-10-03 17:25:59 +00:00
Stefan Agner
7bdd9dd38a Update Home Assistant base image to 2025.10.0 (#153441) 2025-10-03 17:25:58 +00:00
Joost Lekkerkerker
1e8aae0a89 Fix missing powerconsumptionreport in Smartthings (#153438) 2025-10-03 17:25:56 +00:00
Aidan Timson
cf668e9dc2 Add missing translation for media browser default title (#153430)
Co-authored-by: Erwin Douna <e.douna@gmail.com>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-10-03 17:25:55 +00:00
Norbert Rittel
2e91c8700f Fix sentence-casing in user-facing strings of slack (#153427) 2025-10-03 17:25:53 +00:00
J. Nick Koston
9d14627daa Bump aiohomekit to 3.2.19 (#153423) 2025-10-03 17:25:52 +00:00
TheJulianJES
73b8283748 Fix Z-Wave RGB light turn on causing rare ZeroDivisionError (#153422) 2025-10-03 17:25:50 +00:00
Manu
edeaaa2e63 Update markdown field description in ntfy integration (#153421) 2025-10-03 17:25:49 +00:00
Tom Matheussen
d26dd8fc39 Fix Satel Integra creating new binary sensors on YAML import (#153419) 2025-10-03 17:25:47 +00:00
Denis Shulyaka
34640ea735 Disable thinking for unsupported gemini models (#153415) 2025-10-03 17:25:46 +00:00
Erwin Douna
46a2e21ef0 Bump pyportainer 1.0.3 (#153413) 2025-10-03 17:25:45 +00:00
Erwin Douna
508af53e72 Bump pyportainer 1.0.2 (#153326) 2025-10-03 17:25:43 +00:00
Josef Zweck
5f7440608c Increase onedrive upload chunk size (#153406) 2025-10-03 17:22:10 +00:00
Michael J. Kidd
0d1aa38a26 Pushover: Handle empty data section properly (#153397) 2025-10-03 17:22:08 +00:00
Luke Lashley
929f8c148a Bump python-roborock to 2.49.1 (#153396) 2025-10-03 17:22:07 +00:00
Joakim Plate
92db1f5a04 Correct blocking update in ToGrill with lack of notifications (#153387) 2025-10-03 17:22:05 +00:00
starkillerOG
e66b5ce0bf Add Roborock mop intensity translations (#153380) 2025-10-03 17:22:03 +00:00
Michael
1e17150e9f Explicit pass in the config entry to coordinator in airtouch4 (#153361)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-03 17:22:02 +00:00
Michael
792902de3d Set config entry to None in ProxmoxVE (#153357) 2025-10-03 17:22:00 +00:00
Andre Lengwenus
04d78c3dd5 Explicitly check for None in raw value processing of modbus (#153352) 2025-10-03 17:21:59 +00:00
G Johansson
5c8d5bfb84 Fix Nord Pool 15 minute interval (#153350) 2025-10-03 17:21:57 +00:00
puddly
99bff31869 Do not reset the adapter twice during ZHA options flow migration (#153345) 2025-10-03 17:21:56 +00:00
Stefan Agner
d949119fb0 Bump aiohasupervisor to 0.3.3 (#153344) 2025-10-03 17:21:54 +00:00
Tom
e7b737ece5 Bump airOS module for alternative login url (#153317) 2025-10-03 17:21:52 +00:00
Tom
fb8ddac2e8 Bump airOS dependency (#153065) 2025-10-03 17:21:51 +00:00
591 changed files with 9958 additions and 32360 deletions

View File

@@ -190,7 +190,7 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
           fi
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

View File

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.10"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           key: >-
@@ -279,7 +279,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -309,7 +309,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: venv
           fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
            needs.info.outputs.pre-commit_cache_key }}
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          key: >-
@@ -513,7 +513,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -525,7 +525,7 @@ jobs:
            env.HA_SHORT_VERSION }}-
      - name: Check if apt cache exists
        id: cache-apt-check
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
          path: |
@@ -570,7 +570,7 @@ jobs:
          fi
      - name: Save apt cache
        if: steps.cache-apt-check.outputs.cache-hit != 'true'
-        uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
      - base
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Dependency review
-        uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
+        uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
        with:
          license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.python_cache_key }}
      - name: Restore mypy cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: .mypy_cache
          key: >-
@@ -935,7 +935,7 @@ jobs:
    name: Split tests for full run
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
      Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
      Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
      Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
    steps:
      - name: Restore apt cache
-        uses: actions/cache/restore@v4.3.0
+        uses: actions/cache/restore@v4.2.4
        with:
          path: |
            ${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
          check-latest: true
      - name: Restore full Python ${{ matrix.python-version }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: venv
          fail-on-cache-miss: true

View File

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
+        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          languages: python
      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
+        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
        with:
          category: "/language:python"

View File

@@ -203,7 +203,6 @@ homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.filter.*
-homeassistant.components.firefly_iii.*
 homeassistant.components.fitbit.*
 homeassistant.components.flexit_bacnet.*
 homeassistant.components.flux_led.*
@@ -326,7 +325,6 @@ homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
 homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
-homeassistant.components.lunatone.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*

CODEOWNERS (generated)
View File

@@ -492,8 +492,6 @@ build.json @home-assistant/supervisor
 /tests/components/filesize/ @gjohansson-ST
 /homeassistant/components/filter/ @dgomes
 /tests/components/filter/ @dgomes
-/homeassistant/components/firefly_iii/ @erwindouna
-/tests/components/firefly_iii/ @erwindouna
 /homeassistant/components/fireservicerota/ @cyberjunky
 /tests/components/fireservicerota/ @cyberjunky
 /homeassistant/components/firmata/ @DaAwesomeP
@@ -762,8 +760,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo
-/homeassistant/components/iometer/ @MaestroOnICe
-/tests/components/iometer/ @MaestroOnICe
+/homeassistant/components/iometer/ @jukrebs
+/tests/components/iometer/ @jukrebs
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -910,8 +908,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/luci/ @mzdrale
 /homeassistant/components/luftdaten/ @fabaff @frenck
 /tests/components/luftdaten/ @fabaff @frenck
-/homeassistant/components/lunatone/ @MoonDevLT
-/tests/components/lunatone/ @MoonDevLT
 /homeassistant/components/lupusec/ @majuss @suaveolent
 /tests/components/lupusec/ @majuss @suaveolent
 /homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -957,8 +953,6 @@ build.json @home-assistant/supervisor
 /tests/components/met_eireann/ @DylanGore
 /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
 /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
-/homeassistant/components/meteo_lt/ @xE1H
-/tests/components/meteo_lt/ @xE1H
 /homeassistant/components/meteoalarm/ @rolfberkenbosch
 /homeassistant/components/meteoclimatic/ @adrianmo
 /tests/components/meteoclimatic/ @adrianmo

View File

@@ -616,44 +616,34 @@ async def async_enable_logging(
         ),
     )
-    logger = logging.getLogger()
-    logger.setLevel(logging.INFO if verbose else logging.WARNING)
+    # Log errors to a file if we have write access to file or config dir
     if log_file is None:
-        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
-        if "SUPERVISOR" in os.environ:
-            _LOGGER.info("Running in Supervisor, not logging to file")
-            # Rename the default log file if it exists, since previous versions created
-            # it even on Supervisor
-            if os.path.isfile(default_log_path):
-                with contextlib.suppress(OSError):
-                    os.rename(default_log_path, f"{default_log_path}.old")
-            err_log_path = None
-        else:
-            err_log_path = default_log_path
+        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
     else:
         err_log_path = os.path.abspath(log_file)
-    if err_log_path:
-        err_path_exists = os.path.isfile(err_log_path)
-        err_dir = os.path.dirname(err_log_path)
+    err_path_exists = os.path.isfile(err_log_path)
+    err_dir = os.path.dirname(err_log_path)
     # Check if we can write to the error log if it exists or that
     # we can create files in the containing directory if not.
     if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
         not err_path_exists and os.access(err_dir, os.W_OK)
     ):
         err_handler = await hass.async_add_executor_job(
             _create_log_file, err_log_path, log_rotate_days
         )
         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
-        logger.addHandler(err_handler)
-        # Save the log file location for access by other components.
-        hass.data[DATA_LOGGING] = err_log_path
-    else:
-        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
+        logger = logging.getLogger()
+        logger.addHandler(err_handler)
+        logger.setLevel(logging.INFO if verbose else logging.WARNING)
+        # Save the log file location for access by other components.
+        hass.data[DATA_LOGGING] = err_log_path
+    else:
+        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
     async_activate_log_queue_handler(hass)

View File

@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)

View File

@@ -1,6 +1,9 @@
 {
   "entity": {
     "sensor": {
+      "air_quality": {
+        "default": "mdi:air-filter"
+      },
       "cloud_ceiling": {
         "default": "mdi:weather-fog"
       },
@@ -34,9 +37,6 @@
       "thunderstorm_probability_night": {
         "default": "mdi:weather-lightning"
       },
-      "translation_key": {
-        "default": "mdi:air-filter"
-      },
       "tree_pollen": {
         "default": "mdi:tree-outline"
       },

View File

@@ -1,7 +1,9 @@
 """Airgradient Update platform."""
 from datetime import timedelta
+import logging
+from airgradient import AirGradientConnectionError
 from propcache.api import cached_property
 from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -13,6 +15,7 @@ from .entity import AirGradientEntity
 PARALLEL_UPDATES = 1
 SCAN_INTERVAL = timedelta(hours=1)
+_LOGGER = logging.getLogger(__name__)
 async def async_setup_entry(
@@ -31,6 +34,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
     """Representation of Airgradient Update."""
     _attr_device_class = UpdateDeviceClass.FIRMWARE
+    _server_unreachable_logged = False
     def __init__(self, coordinator: AirGradientCoordinator) -> None:
         """Initialize the entity."""
@@ -47,10 +51,27 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
         """Return the installed version of the entity."""
         return self.coordinator.data.measures.firmware_version
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return super().available and self._attr_available
     async def async_update(self) -> None:
         """Update the entity."""
-        self._attr_latest_version = (
-            await self.coordinator.client.get_latest_firmware_version(
-                self.coordinator.serial_number
-            )
-        )
+        try:
+            self._attr_latest_version = (
+                await self.coordinator.client.get_latest_firmware_version(
+                    self.coordinator.serial_number
+                )
+            )
+        except AirGradientConnectionError:
+            self._attr_latest_version = None
+            self._attr_available = False
+            if not self._server_unreachable_logged:
+                _LOGGER.error(
+                    "Unable to connect to AirGradient server to check for updates"
+                )
+                self._server_unreachable_logged = True
+        else:
+            self._server_unreachable_logged = False
+            self._attr_available = True

View File

@@ -2,7 +2,6 @@
 from __future__ import annotations
-from collections.abc import Mapping
 import logging
 from typing import Any
@@ -15,7 +14,7 @@ from airos.exceptions import (
 )
 import voluptuous as vol
-from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import (
     CONF_HOST,
     CONF_PASSWORD,
@@ -25,11 +24,6 @@ from homeassistant.const import (
 )
 from homeassistant.data_entry_flow import section
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-from homeassistant.helpers.selector import (
-    TextSelector,
-    TextSelectorConfig,
-    TextSelectorType,
-)
 from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOS8
@@ -60,107 +54,50 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1
     MINOR_VERSION = 2
-    def __init__(self) -> None:
-        """Initialize the config flow."""
-        super().__init__()
-        self.airos_device: AirOS8
-        self.errors: dict[str, str] = {}
     async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
+        self,
+        user_input: dict[str, Any] | None = None,
     ) -> ConfigFlowResult:
-        """Handle the manual input of host and credentials."""
-        self.errors = {}
+        """Handle the initial step."""
+        errors: dict[str, str] = {}
         if user_input is not None:
-            validated_info = await self._validate_and_get_device_info(user_input)
-            if validated_info:
-                return self.async_create_entry(
-                    title=validated_info["title"],
-                    data=validated_info["data"],
-                )
-        return self.async_show_form(
-            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
-        )
-    async def _validate_and_get_device_info(
-        self, config_data: dict[str, Any]
-    ) -> dict[str, Any] | None:
-        """Validate user input with the device API."""
-        # By default airOS 8 comes with self-signed SSL certificates,
-        # with no option in the web UI to change or upload a custom certificate.
-        session = async_get_clientsession(
-            self.hass,
-            verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
-        )
-        airos_device = AirOS8(
-            host=config_data[CONF_HOST],
-            username=config_data[CONF_USERNAME],
-            password=config_data[CONF_PASSWORD],
-            session=session,
-            use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
-        )
-        try:
-            await airos_device.login()
-            airos_data = await airos_device.status()
-        except (
-            AirOSConnectionSetupError,
-            AirOSDeviceConnectionError,
-        ):
-            self.errors["base"] = "cannot_connect"
-        except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
-            self.errors["base"] = "invalid_auth"
-        except AirOSKeyDataMissingError:
-            self.errors["base"] = "key_data_missing"
-        except Exception:
-            _LOGGER.exception("Unexpected exception during credential validation")
-            self.errors["base"] = "unknown"
-        else:
-            await self.async_set_unique_id(airos_data.derived.mac)
-            if self.source == SOURCE_REAUTH:
-                self._abort_if_unique_id_mismatch()
-            else:
-                self._abort_if_unique_id_configured()
-            return {"title": airos_data.host.hostname, "data": config_data}
-        return None
-    async def async_step_reauth(
-        self,
-        user_input: Mapping[str, Any],
-    ) -> ConfigFlowResult:
-        """Perform reauthentication upon an API authentication error."""
-        return await self.async_step_reauth_confirm(user_input)
-    async def async_step_reauth_confirm(
-        self,
-        user_input: Mapping[str, Any],
-    ) -> ConfigFlowResult:
-        """Perform reauthentication upon an API authentication error."""
-        self.errors = {}
-        if user_input:
-            validate_data = {**self._get_reauth_entry().data, **user_input}
-            if await self._validate_and_get_device_info(config_data=validate_data):
-                return self.async_update_reload_and_abort(
-                    self._get_reauth_entry(),
-                    data_updates=validate_data,
-                )
+            # By default airOS 8 comes with self-signed SSL certificates,
+            # with no option in the web UI to change or upload a custom certificate.
+            session = async_get_clientsession(
+                self.hass,
+                verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
+            )
+            airos_device = AirOS8(
+                host=user_input[CONF_HOST],
+                username=user_input[CONF_USERNAME],
+                password=user_input[CONF_PASSWORD],
+                session=session,
+                use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
+            )
+            try:
+                await airos_device.login()
+                airos_data = await airos_device.status()
+            except (
+                AirOSConnectionSetupError,
+                AirOSDeviceConnectionError,
+            ):
+                errors["base"] = "cannot_connect"
+            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
+                errors["base"] = "invalid_auth"
+            except AirOSKeyDataMissingError:
+                errors["base"] = "key_data_missing"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                await self.async_set_unique_id(airos_data.derived.mac)
+                self._abort_if_unique_id_configured()
+                return self.async_create_entry(
+                    title=airos_data.host.hostname, data=user_input
+                )
         return self.async_show_form(
-            step_id="reauth_confirm",
-            data_schema=vol.Schema(
-                {
-                    vol.Required(CONF_PASSWORD): TextSelector(
-                        TextSelectorConfig(
-                            type=TextSelectorType.PASSWORD,
-                            autocomplete="current-password",
-                        )
-                    ),
-                }
-            ),
-            errors=self.errors,
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )

View File

@@ -14,7 +14,7 @@ from airos.exceptions import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.exceptions import ConfigEntryError
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
         try:
             await self.airos_device.login()
             return await self.airos_device.status()
-        except AirOSConnectionAuthenticationError as err:
+        except (AirOSConnectionAuthenticationError,) as err:
             _LOGGER.exception("Error authenticating with airOS device")
-            raise ConfigEntryAuthFailed(
+            raise ConfigEntryError(
                 translation_domain=DOMAIN, translation_key="invalid_auth"
             ) from err
         except (

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airos",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["airos==0.5.4"]
+  "requirements": ["airos==0.5.5"]
 }

View File

@@ -2,14 +2,6 @@
   "config": {
     "flow_title": "Ubiquiti airOS device",
     "step": {
-      "reauth_confirm": {
-        "data": {
-          "password": "[%key:common::config_flow::data::password%]"
-        },
-        "data_description": {
-          "password": "[%key:component::airos::config::step::user::data_description::password%]"
-        }
-      },
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
@@ -42,9 +34,7 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
     }
   },
   "entity": {

View File

@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     }
 )
-URL_API_INTEGRATION = {
-    "url": "https://dashboard.airthings.com/integrations/api-integration"
-}
 class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Airthings."""
@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_show_form(
                 step_id="user",
                 data_schema=STEP_USER_DATA_SCHEMA,
-                description_placeholders=URL_API_INTEGRATION,
+                description_placeholders={
+                    "url": (
+                        "https://dashboard.airthings.com/integrations/api-integration"
+                    ),
+                },
             )
         errors = {}
@@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_create_entry(title="Airthings", data=user_input)
         return self.async_show_form(
-            step_id="user",
-            data_schema=STEP_USER_DATA_SCHEMA,
-            errors=errors,
-            description_placeholders=URL_API_INTEGRATION,
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )

View File

@@ -4,9 +4,9 @@
       "user": {
         "data": {
           "id": "ID",
-          "secret": "Secret"
-        },
-        "description": "Log in at {url} to find your credentials"
+          "secret": "Secret",
+          "description": "Login at {url} to find your credentials"
+        }
       }
     },
     "error": {

View File

@@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_abort(reason="no_devices_found")
         titles = {
-            address: get_name(discovery.device)
+            address: discovery.device.name
             for (address, discovery) in self._discovered_devices.items()
         }
         return self.async_show_form(

View File

@@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
     ),
 }
-PARALLEL_UPDATES = 0
 @callback
 def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:

View File

@@ -6,9 +6,6 @@
         "description": "[%key:component::bluetooth::config::step::user::description%]",
         "data": {
           "address": "[%key:common::config_flow::data::device%]"
-        },
-        "data_description": {
-          "address": "The Airthings devices discovered via Bluetooth."
         }
       },
       "bluetooth_confirm": {

View File

@@ -18,7 +18,9 @@ from homeassistant.components.binary_sensor import (
 from homeassistant.const import EntityCategory
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+import homeassistant.helpers.entity_registry as er
+from .const import _LOGGER, DOMAIN
 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
 from .utils import async_update_unique_id
@@ -51,11 +53,47 @@ BINARY_SENSORS: Final = (
         ),
         is_supported=lambda device, key: device.sensors.get(key) is not None,
         is_available_fn=lambda device, key: (
-            device.online and device.sensors[key].error is False
+            device.online
+            and (sensor := device.sensors.get(key)) is not None
+            and sensor.error is False
         ),
     ),
 )
+DEPRECATED_BINARY_SENSORS: Final = (
+    AmazonBinarySensorEntityDescription(
+        key="bluetooth",
+        entity_category=EntityCategory.DIAGNOSTIC,
+        translation_key="bluetooth",
+        is_on_fn=lambda device, key: False,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="babyCryDetectionState",
+        translation_key="baby_cry_detection",
+        is_on_fn=lambda device, key: False,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="beepingApplianceDetectionState",
+        translation_key="beeping_appliance_detection",
+        is_on_fn=lambda device, key: False,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="coughDetectionState",
+        translation_key="cough_detection",
+        is_on_fn=lambda device, key: False,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="dogBarkDetectionState",
+        translation_key="dog_bark_detection",
+        is_on_fn=lambda device, key: False,
+    ),
+    AmazonBinarySensorEntityDescription(
+        key="waterSoundsDetectionState",
+        translation_key="water_sounds_detection",
+        is_on_fn=lambda device, key: False,
+    ),
+)
 async def async_setup_entry(
     hass: HomeAssistant,
@@ -66,6 +104,8 @@ async def async_setup_entry(
     coordinator = entry.runtime_data
+    entity_registry = er.async_get(hass)
     # Replace unique id for "detectionState" binary sensor
     await async_update_unique_id(
         hass,
@@ -75,6 +115,16 @@ async def async_setup_entry(
         "detectionState",
     )
+    # Clean up deprecated sensors
+    for sensor_desc in DEPRECATED_BINARY_SENSORS:
+        for serial_num in coordinator.data:
+            unique_id = f"{serial_num}-{sensor_desc.key}"
+            if entity_id := entity_registry.async_get_entity_id(
+                BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
+            ):
+                _LOGGER.debug("Removing deprecated entity %s", entity_id)
+                entity_registry.async_remove(entity_id)
     known_devices: set[str] = set()
     def _check_device() -> None:

View File

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==6.2.7"]
+  "requirements": ["aioamazondevices==6.4.0"]
 }

View File

@@ -32,7 +32,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-        device.online and device.sensors[key].error is False
+        device.online
+        and (sensor := device.sensors.get(key)) is not None
+        and sensor.error is False
     )
@@ -40,9 +42,9 @@ SENSORS: Final = (
     AmazonSensorEntityDescription(
         key="temperature",
         device_class=SensorDeviceClass.TEMPERATURE,
-        native_unit_of_measurement_fn=lambda device, _key: (
+        native_unit_of_measurement_fn=lambda device, key: (
             UnitOfTemperature.CELSIUS
-            if device.sensors[_key].scale == "CELSIUS"
+            if key in device.sensors and device.sensors[key].scale == "CELSIUS"
             else UnitOfTemperature.FAHRENHEIT
         ),
         state_class=SensorStateClass.MEASUREMENT,

View File

@@ -18,7 +18,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import alexa_api_call, async_update_unique_id
+from .utils import (
+    alexa_api_call,
+    async_remove_dnd_from_virtual_group,
+    async_update_unique_id,
+)
 PARALLEL_UPDATES = 1
@@ -29,7 +33,9 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
     is_on_fn: Callable[[AmazonDevice], bool]
     is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
-        device.online and device.sensors[key].error is False
+        device.online
+        and (sensor := device.sensors.get(key)) is not None
+        and sensor.error is False
     )
     method: str
@@ -58,6 +64,9 @@ async def async_setup_entry(
         hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
     )
+    # Remove DND switch from virtual groups
+    await async_remove_dnd_from_virtual_group(hass, coordinator)
     known_devices: set[str] = set()
     def _check_device() -> None:

View File

@@ -4,8 +4,10 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate
+from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
+from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 import homeassistant.helpers.entity_registry as er
@@ -61,3 +63,21 @@ async def async_update_unique_id(
             # Update the registry with the new unique_id
             entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
+async def async_remove_dnd_from_virtual_group(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+) -> None:
+    """Remove entity DND from virtual group."""
+    entity_registry = er.async_get(hass)
+    for serial_num in coordinator.data:
+        unique_id = f"{serial_num}-do_not_disturb"
+        entity_id = entity_registry.async_get_entity_id(
+            DOMAIN, SWITCH_DOMAIN, unique_id
+        )
+        is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
+        if entity_id and is_group:
+            entity_registry.async_remove(entity_id)
+            _LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
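The added helper walks the coordinator data, looks up each device's do-not-disturb switch by its unique id, and removes the registry entry when the device is a virtual speaker group. A simplified, registry-free sketch of that filtering step follows; the FakeDevice class, the plain-dict registry, and the SPEAKER_GROUP_FAMILY value are illustrative assumptions, not the Home Assistant or aioamazondevices APIs.

from dataclasses import dataclass

SPEAKER_GROUP_FAMILY = "SPEAKER_GROUP"  # placeholder value for this sketch


@dataclass
class FakeDevice:
    """Stand-in for an aioamazondevices device record."""

    device_family: str


def dnd_entities_to_remove(
    devices: dict[str, FakeDevice], registry: dict[str, str]
) -> list[str]:
    """Return registered DND entity ids that belong to virtual speaker groups."""
    removals = []
    for serial_num, device in devices.items():
        unique_id = f"{serial_num}-do_not_disturb"
        entity_id = registry.get(unique_id)
        if entity_id and device.device_family == SPEAKER_GROUP_FAMILY:
            removals.append(entity_id)
    return removals


registry = {"ABC123-do_not_disturb": "switch.everywhere_group_do_not_disturb"}
devices = {"ABC123": FakeDevice(device_family=SPEAKER_GROUP_FAMILY)}
print(dnd_entities_to_remove(devices, registry))  # ['switch.everywhere_group_do_not_disturb']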

View File

@@ -629,6 +629,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
         devices_info.append(
             {
+                "entities": [],
                 "entry_type": device_entry.entry_type,
                 "has_configuration_url": device_entry.configuration_url is not None,
                 "hw_version": device_entry.hw_version,
@@ -637,7 +638,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
                 "model_id": device_entry.model_id,
                 "sw_version": device_entry.sw_version,
                 "via_device": device_entry.via_device_id,
-                "entities": [],
             }
         )

View File

@@ -2,7 +2,9 @@
 from __future__ import annotations
-from typing import Any
+from typing import Any, TypeVar
+T = TypeVar("T", dict[str, Any], list[Any], None)
 TRANSLATION_MAP = {
     "wan_rx": "sensor_rx_bytes",
@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
     return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
-def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
+def translate_to_legacy(raw: T) -> T:
     """Translate raw data to legacy format for dicts and lists."""
     if raw is None:
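The replacement spells the same type constraint with a module-level TypeVar instead of Python 3.12's inline generic syntax (PEP 695); both say the function returns the same kind of container it was given. A small sketch of the two equivalent spellings, with an identity-style translate function standing in for the real mapping logic:

from typing import Any, TypeVar

# Pre-PEP 695 spelling: a constrained TypeVar declared once at module level.
T = TypeVar("T", dict[str, Any], list[Any], None)


def translate(raw: T) -> T:
    """Return the input unchanged; the annotation ties the output type to the input type."""
    return raw


# PEP 695 spelling (Python 3.12+) of the same constraint, shown for comparison:
# def translate[T: (dict[str, Any], list[Any], None)](raw: T) -> T: ...

print(translate({"wan_rx": 1024}))
print(translate(None))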

View File

@@ -26,6 +26,9 @@ async def async_setup_entry(
     if CONF_HOST in config_entry.data:
         coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
+        config_entry.async_on_unload(
+            config_entry.add_update_listener(_async_update_listener)
+        )
     else:
         coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -33,11 +36,6 @@ async def async_setup_entry(
     config_entry.runtime_data = coordinator
-    if CONF_HOST in config_entry.data:
-        config_entry.async_on_unload(
-            config_entry.add_update_listener(_async_update_listener)
-        )
     await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
     return True

View File

@@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import frame
 from homeassistant.util import slugify
-from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
 from . import util
 from .agent import BackupAgent
@@ -145,7 +144,7 @@ class DownloadBackupView(HomeAssistantView):
                 return Response(status=HTTPStatus.NOT_FOUND)
         else:
             stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))
+            reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
         worker_done_event = asyncio.Event()
@@ -153,7 +152,7 @@ class DownloadBackupView(HomeAssistantView):
             """Call by the worker thread when it's done."""
            hass.loop.call_soon_threadsafe(worker_done_event.set)
-        stream = AsyncIteratorWriter(hass.loop)
+        stream = util.AsyncIteratorWriter(hass)
         worker = threading.Thread(
             target=util.decrypt_backup,
             args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,7 +38,6 @@ from homeassistant.helpers import (
 )
 from homeassistant.helpers.json import json_bytes
 from homeassistant.util import dt as dt_util, json as json_util
-from homeassistant.util.async_iterator import AsyncIteratorReader
 from . import util as backup_util
 from .agent import (
@@ -73,6 +72,7 @@ from .models import (
 )
 from .store import BackupStore
 from .util import (
+    AsyncIteratorReader,
     DecryptedBackupStreamer,
     EncryptedBackupStreamer,
     make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
             reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
         else:
             backup_stream = await agent.async_download_backup(backup_id)
-            reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
+            reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
         try:
             await self.hass.async_add_executor_job(
                 validate_password_stream, reader, password

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
 import asyncio
 from collections.abc import AsyncIterator, Callable, Coroutine
+from concurrent.futures import CancelledError, Future
 import copy
 from dataclasses import dataclass, replace
 from io import BytesIO
@@ -13,7 +14,7 @@ from pathlib import Path, PurePath
 from queue import SimpleQueue
 import tarfile
 import threading
-from typing import IO, Any, cast
+from typing import IO, Any, Self, cast
 import aiohttp
 from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util import dt as dt_util
-from homeassistant.util.async_iterator import (
-    Abort,
-    AsyncIteratorReader,
-    AsyncIteratorWriter,
-)
 from homeassistant.util.json import JsonObjectType, json_loads_object
 from .const import BUF_SIZE, LOGGER
@@ -63,6 +59,12 @@ class BackupEmpty(DecryptError):
     _message = "No tar files found in the backup."
+class AbortCipher(HomeAssistantError):
+    """Abort the cipher operation."""
+    _message = "Abort cipher operation."
 def make_backup_dir(path: Path) -> None:
     """Create a backup directory if it does not exist."""
     path.mkdir(exist_ok=True)
@@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool:
     return False
+class AsyncIteratorReader:
+    """Wrap an AsyncIterator."""
+    def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
+        """Initialize the wrapper."""
+        self._aborted = False
+        self._hass = hass
+        self._stream = stream
+        self._buffer: bytes | None = None
+        self._next_future: Future[bytes | None] | None = None
+        self._pos: int = 0
+    async def _next(self) -> bytes | None:
+        """Get the next chunk from the iterator."""
+        return await anext(self._stream, None)
+    def abort(self) -> None:
+        """Abort the reader."""
+        self._aborted = True
+        if self._next_future is not None:
+            self._next_future.cancel()
+    def read(self, n: int = -1, /) -> bytes:
+        """Read data from the iterator."""
+        result = bytearray()
+        while n < 0 or len(result) < n:
+            if not self._buffer:
+                self._next_future = asyncio.run_coroutine_threadsafe(
+                    self._next(), self._hass.loop
+                )
+                if self._aborted:
+                    self._next_future.cancel()
+                    raise AbortCipher
+                try:
+                    self._buffer = self._next_future.result()
+                except CancelledError as err:
+                    raise AbortCipher from err
+                self._pos = 0
+            if not self._buffer:
+                # The stream is exhausted
+                break
+            chunk = self._buffer[self._pos : self._pos + n]
+            result.extend(chunk)
+            n -= len(chunk)
+            self._pos += len(chunk)
+            if self._pos == len(self._buffer):
+                self._buffer = None
+        return bytes(result)
+    def close(self) -> None:
+        """Close the iterator."""
+class AsyncIteratorWriter:
+    """Wrap an AsyncIterator."""
+    def __init__(self, hass: HomeAssistant) -> None:
+        """Initialize the wrapper."""
+        self._aborted = False
+        self._hass = hass
+        self._pos: int = 0
+        self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
+        self._write_future: Future[bytes | None] | None = None
+    def __aiter__(self) -> Self:
+        """Return the iterator."""
+        return self
+    async def __anext__(self) -> bytes:
+        """Get the next chunk from the iterator."""
+        if data := await self._queue.get():
+            return data
+        raise StopAsyncIteration
+    def abort(self) -> None:
+        """Abort the writer."""
+        self._aborted = True
+        if self._write_future is not None:
+            self._write_future.cancel()
+    def tell(self) -> int:
+        """Return the current position in the iterator."""
+        return self._pos
+    def write(self, s: bytes, /) -> int:
+        """Write data to the iterator."""
+        self._write_future = asyncio.run_coroutine_threadsafe(
+            self._queue.put(s), self._hass.loop
+        )
+        if self._aborted:
+            self._write_future.cancel()
+            raise AbortCipher
+        try:
+            self._write_future.result()
+        except CancelledError as err:
+            raise AbortCipher from err
+        self._pos += len(s)
+        return len(s)
 def validate_password_stream(
     input_stream: IO[bytes],
     password: str | None,
@@ -240,7 +342,7 @@ def decrypt_backup(
         finally:
             # Write an empty chunk to signal the end of the stream
             output_stream.write(b"")
-    except Abort:
+    except AbortCipher:
         LOGGER.debug("Cipher operation aborted")
     finally:
         on_done(error)
@@ -328,7 +430,7 @@ def encrypt_backup(
         finally:
             # Write an empty chunk to signal the end of the stream
             output_stream.write(b"")
-    except Abort:
+    except AbortCipher:
         LOGGER.debug("Cipher operation aborted")
     finally:
         on_done(error)
@@ -455,8 +557,8 @@ class _CipherBackupStreamer:
             self._hass.loop.call_soon_threadsafe(worker_status.done.set)
         stream = await self._open_stream()
-        reader = AsyncIteratorReader(self._hass.loop, stream)
-        writer = AsyncIteratorWriter(self._hass.loop)
+        reader = AsyncIteratorReader(self._hass, stream)
+        writer = AsyncIteratorWriter(self._hass)
         worker = threading.Thread(
             target=self._cipher_func,
             args=[
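The AsyncIteratorReader and AsyncIteratorWriter added above bridge the event loop and the cipher worker thread: the thread makes blocking read()/write() calls, and each call hops onto the loop with asyncio.run_coroutine_threadsafe and waits on the returned future. A compact standalone sketch of the reader half of that technique, independent of Home Assistant (the sample producer and chunk values are invented):

import asyncio
import threading
from collections.abc import AsyncIterator


class ThreadReader:
    """Expose an async byte iterator as a blocking read() for a worker thread."""

    def __init__(self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]) -> None:
        self._loop = loop
        self._stream = stream

    async def _next(self) -> bytes:
        # Wrap anext() in a coroutine so run_coroutine_threadsafe can schedule it.
        return await anext(self._stream, b"")

    def read(self) -> bytes:
        # Called from the worker thread: schedule on the loop, block on the result.
        return asyncio.run_coroutine_threadsafe(self._next(), self._loop).result()


async def produce() -> AsyncIterator[bytes]:
    for chunk in (b"backup-", b"payload"):
        await asyncio.sleep(0)
        yield chunk


async def main() -> None:
    reader = ThreadReader(asyncio.get_running_loop(), produce())

    def worker() -> None:
        data = b""
        while chunk := reader.read():
            data += chunk
        print(data)  # b'backup-payload'

    thread = threading.Thread(target=worker)
    thread.start()
    # Keep the loop free to service the worker's scheduled coroutines while joining.
    await asyncio.to_thread(thread.join)


asyncio.run(main())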

View File

@@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
     # Add the websocket and API client
     entry.runtime_data = BangOlufsenData(websocket, client)
-    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
-    # Start WebSocket connection once the platforms have been loaded.
-    # This ensures that the initial WebSocket notifications are dispatched to entities
+    # Start WebSocket connection
     await client.connect_notifications(remote_control=True, reconnect=True)
+    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
     return True

View File

@@ -125,8 +125,7 @@ async def async_setup_entry(
     async_add_entities(
         new_entities=[
             BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
-        ],
-        update_before_add=True,
+        ]
     )
     # Register actions.
@@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
             self._software_status.software_version,
         )
+        # Get overall device state once. This is handled by WebSocket events the rest of the time.
+        product_state = await self._client.get_product_state()
+        # Get volume information.
+        if product_state.volume:
+            self._volume = product_state.volume
+        # Get all playback information.
+        # Ensure that the metadata is not None upon startup
+        if product_state.playback:
+            if product_state.playback.metadata:
+                self._playback_metadata = product_state.playback.metadata
+                self._remote_leader = product_state.playback.metadata.remote_leader
+            if product_state.playback.progress:
+                self._playback_progress = product_state.playback.progress
+            if product_state.playback.source:
+                self._source_change = product_state.playback.source
+            if product_state.playback.state:
+                self._playback_state = product_state.playback.state
+                # Set initial state
+                if self._playback_state.value:
+                    self._state = self._playback_state.value
         self._attr_media_position_updated_at = utcnow()
+        # Get the highest resolution available of the given images.
+        self._media_image = get_highest_resolution_artwork(self._playback_metadata)
         # If the device has been updated with new sources, then the API will fail here.
         await self._async_update_sources()

View File

@@ -3,12 +3,16 @@ beolink_allstandby:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
 beolink_expand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     all_discovered:
       required: false
@@ -33,6 +37,8 @@ beolink_join:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false
@@ -65,12 +71,16 @@ beolink_leave:
     entity:
       integration: bang_olufsen
      domain: media_player
+    device:
+      integration: bang_olufsen
 beolink_unexpand:
   target:
     entity:
       integration: bang_olufsen
       domain: media_player
+    device:
+      integration: bang_olufsen
   fields:
     jid_options:
       collapsed: false

View File

@@ -8,7 +8,7 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
-  "requirements": ["brother==5.1.0"],
+  "requirements": ["brother==5.1.1"],
   "zeroconf": [
     {
       "type": "_printer._tcp.local.",

View File

@@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     hass.http.register_view(CalendarListView(component))
     hass.http.register_view(CalendarEventView(component))
-    frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")
+    frontend.async_register_built_in_panel(
+        hass, "calendar", "calendar", "hass:calendar"
+    )
     websocket_api.async_register_command(hass, handle_calendar_event_create)
     websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -51,6 +51,12 @@ from homeassistant.const import (
 from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, issue_registry as ir
+from homeassistant.helpers.deprecation import (
+    DeprecatedConstantEnum,
+    all_with_deprecated_constants,
+    check_if_deprecated_constant,
+    dir_with_deprecated_constants,
+)
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
 from homeassistant.helpers.event import async_track_time_interval
@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
 ATTR_MEDIA_PLAYER: Final = "media_player"
 ATTR_FORMAT: Final = "format"
+# These constants are deprecated as of Home Assistant 2024.10
+# Please use the StreamType enum instead.
+_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
+_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
+_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
 class CameraEntityFeature(IntFlag):
     """Supported features of the camera entity."""
@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
         duration=service_call.data[CONF_DURATION],
         lookback=service_call.data[CONF_LOOKBACK],
     )
+# These can be removed if no deprecated constant are in this module anymore
+__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
+__dir__ = partial(
+    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
+)
+__all__ = all_with_deprecated_constants(globals())
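The added block uses Home Assistant's deprecation helpers, which in turn rely on Python's module-level __getattr__ hook (PEP 562): accessing an old constant name still resolves, but can emit a warning. A minimal sketch of the underlying mechanism without the helper library; the constant names and values here are placeholders, not the real camera module.

# deprecated_module.py -- PEP 562 sketch, not the homeassistant.helpers.deprecation API
import warnings
from enum import Enum


class CameraState(Enum):
    RECORDING = "recording"
    STREAMING = "streaming"
    IDLE = "idle"


_DEPRECATED = {
    "STATE_RECORDING": CameraState.RECORDING,
    "STATE_STREAMING": CameraState.STREAMING,
    "STATE_IDLE": CameraState.IDLE,
}


def __getattr__(name: str) -> CameraState:
    # Only called when normal module attribute lookup fails.
    if name in _DEPRECATED:
        warnings.warn(
            f"{name} is deprecated, use CameraState instead", DeprecationWarning
        )
        return _DEPRECATED[name]
    raise AttributeError(f"module has no attribute {name!r}")


def __dir__() -> list[str]:
    # Keep the deprecated names discoverable for dir() and autocompletion.
    return [*globals(), *_DEPRECATED]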

View File

@@ -53,6 +53,7 @@ from .const import (
     CONF_ACME_SERVER,
     CONF_ALEXA,
     CONF_ALIASES,
+    CONF_CLOUDHOOK_SERVER,
     CONF_COGNITO_CLIENT_ID,
     CONF_ENTITY_CONFIG,
     CONF_FILTER,
@@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema(
                 vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
                 vol.Optional(CONF_ACCOUNTS_SERVER): str,
                 vol.Optional(CONF_ACME_SERVER): str,
+                vol.Optional(CONF_CLOUDHOOK_SERVER): str,
                 vol.Optional(CONF_RELAYER_SERVER): str,
                 vol.Optional(CONF_REMOTESTATE_SERVER): str,
                 vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id"
 CONF_ACCOUNT_LINK_SERVER = "account_link_server"
 CONF_ACCOUNTS_SERVER = "accounts_server"
 CONF_ACME_SERVER = "acme_server"
+CONF_CLOUDHOOK_SERVER = "cloudhook_server"
 CONF_RELAYER_SERVER = "relayer_server"
 CONF_REMOTESTATE_SERVER = "remotestate_server"
 CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.2.0"],
+  "requirements": ["hass-nabucasa==1.1.1"],
   "single_config_entry": true
 }

View File

@@ -1,106 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home` and reusing result.
Extract `async_setup_entry` into own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors
config-flow:
status: todo
comment: |
The config flow misses data descriptions.
Remove URLs from data descriptions, they should be replaced with placeholders.
Make use of Electricity Maps zone keys in country code as dropdown.
Make use of location selector for coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})`
`test_sensor` could use `snapshot_platform`
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered, it is a connecting to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered, it is a connecting to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connect to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
 from asyncio.exceptions import TimeoutError
 from collections.abc import Mapping
+import re
 from typing import Any
 from aiocomelit import (
@@ -27,25 +28,20 @@ from .utils import async_client_session
 DEFAULT_HOST = "192.168.1.252"
 DEFAULT_PIN = "111111"
-pin_regex = r"^[0-9]{4,10}$"
 USER_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
         vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
         vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
     }
 )
-STEP_REAUTH_DATA_SCHEMA = vol.Schema(
-    {vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
-)
+STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
 STEP_RECONFIGURE = vol.Schema(
     {
         vol.Required(CONF_HOST): cv.string,
         vol.Required(CONF_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
+        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
     }
 )
@@ -55,6 +51,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
     api: ComelitCommonApi
+    if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
+        raise InvalidPin
     session = await async_client_session(hass)
     if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
         api = ComeliteSerialBridgeApi(
@@ -105,6 +104,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
                 errors["base"] = "invalid_auth"
+            except InvalidPin:
+                errors["base"] = "invalid_pin"
             except Exception:  # noqa: BLE001
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
@@ -146,6 +147,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
                 errors["base"] = "invalid_auth"
+            except InvalidPin:
+                errors["base"] = "invalid_pin"
             except Exception:  # noqa: BLE001
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
@@ -189,6 +192,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
                 errors["base"] = "invalid_auth"
+            except InvalidPin:
+                errors["base"] = "invalid_pin"
             except Exception:  # noqa: BLE001
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
@@ -210,3 +215,7 @@ class CannotConnect(HomeAssistantError):
 class InvalidAuth(HomeAssistantError):
     """Error to indicate there is invalid auth."""
+class InvalidPin(HomeAssistantError):
+    """Error to indicate an invalid pin."""

View File

@@ -161,7 +161,7 @@ class ComelitSerialBridge(
         entry: ComelitConfigEntry,
         host: str,
         port: int,
-        pin: int,
+        pin: str,
         session: ClientSession,
     ) -> None:
         """Initialize the scanner."""
@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
         entry: ComelitConfigEntry,
         host: str,
         port: int,
-        pin: int,
+        pin: str,
         session: ClientSession,
     ) -> None:
         """Initialize the scanner."""

View File

@@ -7,7 +7,14 @@ from typing import Any, cast
 from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
-from homeassistant.components.cover import CoverDeviceClass, CoverEntity
+from homeassistant.components.cover import (
+    STATE_CLOSED,
+    STATE_CLOSING,
+    STATE_OPEN,
+    STATE_OPENING,
+    CoverDeviceClass,
+    CoverEntity,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity
@@ -62,7 +69,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
         super().__init__(coordinator, device, config_entry_entry_id)
         # Device doesn't provide a status so we assume UNKNOWN at first startup
         self._last_action: int | None = None
-        self._last_state: str | None = None
     def _current_action(self, action: str) -> bool:
         """Return the current cover action."""
@@ -98,7 +104,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
     @bridge_api_call
     async def _cover_set_state(self, action: int, state: int) -> None:
         """Set desired cover state."""
-        self._last_state = self.state
         await self.coordinator.api.set_device_status(COVER, self._device.index, action)
         self.coordinator.data[COVER][self._device.index].status = state
         self.async_write_ha_state()
@@ -124,5 +129,10 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
         await super().async_added_to_hass()
-        if last_state := await self.async_get_last_state():
-            self._last_state = last_state.state
+        if (state := await self.async_get_last_state()) is not None:
+            if state.state == STATE_CLOSED:
+                self._last_action = STATE_COVER.index(STATE_CLOSING)
+            if state.state == STATE_OPEN:
+                self._last_action = STATE_COVER.index(STATE_OPENING)
+            self._attr_is_closed = state.state == STATE_CLOSED
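Instead of persisting a private _last_state attribute, the cover now derives _last_action and _attr_is_closed directly from the state restored by RestoreEntity. A minimal sketch of that mapping; the STATE_COVER ordering below is an assumed placeholder for illustration, not the actual aiocomelit constant:

STATE_COVER = ["stopped", "opening", "closing"]  # assumed ordering, illustration only
STATE_OPEN, STATE_CLOSED = "open", "closed"
STATE_OPENING, STATE_CLOSING = "opening", "closing"


def restore(last_state: str | None) -> tuple[int | None, bool | None]:
    """Map a restored Home Assistant state string to (last_action, is_closed)."""
    if last_state is None:
        return None, None
    last_action = None
    if last_state == STATE_CLOSED:
        last_action = STATE_COVER.index(STATE_CLOSING)
    if last_state == STATE_OPEN:
        last_action = STATE_COVER.index(STATE_OPENING)
    return last_action, last_state == STATE_CLOSED


print(restore("closed"))  # (2, True) with the assumed ordering
print(restore(None))      # (None, None): nothing to restore on first start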

View File

@@ -8,5 +8,5 @@
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
   "quality_scale": "platinum",
-  "requirements": ["aiocomelit==0.12.3"]
+  "requirements": ["aiocomelit==1.1.1"]
 }

View File

@@ -43,11 +43,13 @@
       "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
      "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
       "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     }
   },

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the config component."""
     frontend.async_register_built_in_panel(
-        hass, "config", "config", "mdi:cog", require_admin=True
+        hass, "config", "config", "hass:cog", require_admin=True
     )
     for panel in SECTIONS:

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
 from collections.abc import Callable
 from http import HTTPStatus
-import logging
 from typing import Any, NoReturn
 from aiohttp import web
@@ -24,12 +23,7 @@ from homeassistant.helpers.data_entry_flow import (
     FlowManagerResourceView,
 )
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
-from homeassistant.helpers.json import (
-    JSON_DUMP,
-    find_paths_unserializable_data,
-    json_bytes,
-    json_fragment,
-)
+from homeassistant.helpers.json import json_fragment
 from homeassistant.loader import (
     Integration,
     IntegrationNotFound,
@@ -37,9 +31,6 @@ from homeassistant.loader import (
     async_get_integrations,
     async_get_loaded_integration,
 )
-from homeassistant.util.json import format_unserializable_data
-_LOGGER = logging.getLogger(__name__)
 @callback
@@ -411,40 +402,18 @@ def config_entries_flow_subscribe(
     connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
         async_on_flow_init_remove
     )
-    try:
-        serialized_flows = [
-            json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
-            for flw in hass.config_entries.flow.async_progress()
-            if flw["context"]["source"]
-            not in (
-                config_entries.SOURCE_RECONFIGURE,
-                config_entries.SOURCE_USER,
-            )
-        ]
-    except (ValueError, TypeError):
-        # If we can't serialize, we'll filter out unserializable flows
-        serialized_flows = []
-        for flw in hass.config_entries.flow.async_progress():
-            if flw["context"]["source"] in (
-                config_entries.SOURCE_RECONFIGURE,
-                config_entries.SOURCE_USER,
-            ):
-                continue
-            try:
-                serialized_flows.append(
-                    json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
-                )
-            except (ValueError, TypeError):
-                _LOGGER.error(
-                    "Unable to serialize to JSON. Bad data found at %s",
-                    format_unserializable_data(
-                        find_paths_unserializable_data(flw, dump=JSON_DUMP)
-                    ),
-                )
-                continue
     connection.send_message(
-        websocket_api.messages.construct_event_message(
-            msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
+        websocket_api.event_message(
+            msg["id"],
+            [
+                {"type": None, "flow_id": flw["flow_id"], "flow": flw}
+                for flw in hass.config_entries.flow.async_progress()
+                if flw["context"]["source"]
+                not in (
+                    config_entries.SOURCE_RECONFIGURE,
+                    config_entries.SOURCE_USER,
+                )
+            ],
         )
     )
     connection.send_result(msg["id"])

View File

@@ -514,7 +514,7 @@ class ChatLog:
         """Set the LLM system prompt."""
         llm_api: llm.APIInstance | None = None
-        if user_llm_hass_api is None:
+        if not user_llm_hass_api:
             pass
         elif isinstance(user_llm_hass_api, llm.API):
             llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
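The change treats an empty list of selected LLM APIs the same as no selection: "if not user_llm_hass_api" is true for both None and an empty list, while the previous "is None" check let an empty list fall through to the branch that tries to resolve APIs. A two-line illustration of the difference:

for value in (None, [], ["assist"]):
    print(value, "-> skip LLM setup" if not value else "-> resolve LLM APIs")
# None and [] are both skipped; only a non-empty selection is resolved.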

View File

@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/daikin",
   "iot_class": "local_polling",
   "loggers": ["pydaikin"],
-  "requirements": ["pydaikin==2.16.0"],
+  "requirements": ["pydaikin==2.17.1"],
   "zeroconf": ["_dkapi._tcp.local."]
 }

View File

@@ -32,7 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_SOURCE: source_entity_id},
         )
-        hass.config_entries.async_schedule_reload(entry.entry_id)
     entry.async_on_unload(
         async_handle_source_entity_changes(
@@ -47,9 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         )
     )
     await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
+    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True
+async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Update listener, called when the config entry options are changed."""
+    await hass.config_entries.async_reload(entry.entry_id)
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File

@@ -140,7 +140,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
     VERSION = 1
     MINOR_VERSION = 4

View File

@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol
 import voluptuous as vol
-from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
+from homeassistant.const import CONF_DOMAIN
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.condition import (
     Condition,
     ConditionCheckerType,
-    ConditionConfig,
     trace_condition_function,
 )
 from homeassistant.helpers.typing import ConfigType
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
 class DeviceCondition(Condition):
     """Device condition."""
-    _hass: HomeAssistant
-    _config: ConfigType
-    @classmethod
-    async def async_validate_complete_config(
-        cls, hass: HomeAssistant, complete_config: ConfigType
-    ) -> ConfigType:
-        """Validate complete config."""
-        complete_config = await async_validate_device_automation_config(
-            hass,
-            complete_config,
-            cv.DEVICE_CONDITION_SCHEMA,
-            DeviceAutomationType.CONDITION,
-        )
-        # Since we don't want to migrate device conditions to a new format
-        # we just pass the entire config as options.
-        complete_config[CONF_OPTIONS] = complete_config.copy()
-        return complete_config
+    def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
+        """Initialize condition."""
+        self._config = config
+        self._hass = hass
     @classmethod
     async def async_validate_config(
         cls, hass: HomeAssistant, config: ConfigType
     ) -> ConfigType:
-        """Validate config.
-        This is here just to satisfy the abstract class interface. It is never called.
-        """
-        raise NotImplementedError
-    def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
-        """Initialize condition."""
-        self._hass = hass
-        assert config.options is not None
-        self._config = config.options
+        """Validate device condition config."""
+        return await async_validate_device_automation_config(
+            hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
+        )
     async def async_get_checker(self) -> condition.ConditionCheckerType:
         """Test a device condition."""

View File

@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
         self._attr_translation_key = "button"
         self._attr_translation_placeholders = {"key": str(key)}
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the binary sensor state."""
         if (
             message[0] == self._remote_control_property.element_uid

View File

@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
         )
         self.subscriber: Subscriber | None = None
+        self.sync_callback = self._sync
         self._value: float
@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
             self._device_instance.uid, self.subscriber
         )
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the state."""
         if message[0] == self._attr_unique_id:
             self._value = message[1]

View File

@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
         """
         return f"{self._attr_unique_id}_{self._sensor_type}"
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the consumption sensor state."""
         if message[0] == self._attr_unique_id:
             self._value = getattr(

View File

@@ -13,3 +13,8 @@ class Subscriber:
         """Initiate the subscriber."""
         self.name = name
         self.callback = callback
+    def update(self, message: str) -> None:
+        """Trigger hass to update the device."""
+        _LOGGER.debug('%s got message "%s"', self.name, message)
+        self.callback(message)

View File

@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
         """Switch off the device."""
         self._binary_switch_property.set(state=False)
-    def sync_callback(self, message: tuple) -> None:
+    def _sync(self, message: tuple) -> None:
         """Update the binary switch state and consumption."""
         if message[0].startswith("devolo.BinarySwitch"):
             self._attr_is_on = self._device_instance.binary_switch_property[
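Taken together, the devolo changes rename each handler to _sync and have the base entity publish it through a sync_callback attribute, which the new Subscriber.update method invokes. A condensed sketch of that dispatch chain with simplified stand-in classes (not the integration's real base classes):

class Subscriber:
    """Relay publisher messages to a callback."""

    def __init__(self, name: str, callback) -> None:
        self.name = name
        self.callback = callback

    def update(self, message) -> None:
        # Mirrors the added Subscriber.update: forward the message to the entity.
        self.callback(message)


class BaseEntity:
    def __init__(self, uid: str) -> None:
        self.uid = uid
        self.sync_callback = self._sync  # subclasses override _sync, not the attribute

    def _sync(self, message: tuple) -> None:
        print("generic update:", message)


class SwitchEntity(BaseEntity):
    def _sync(self, message: tuple) -> None:
        print("switch update:", message)


entity = SwitchEntity("devolo.BinarySwitch:1")
subscriber = Subscriber(entity.uid, entity.sync_callback)
subscriber.update((entity.uid, 1))  # prints "switch update: ..."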

View File

@@ -56,16 +56,16 @@ async def async_setup_entry(
     hostname = entry.data[CONF_HOSTNAME]
     name = entry.data[CONF_NAME]
-    nameserver_ipv4 = entry.options[CONF_RESOLVER]
-    nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
+    resolver_ipv4 = entry.options[CONF_RESOLVER]
+    resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
     port_ipv4 = entry.options[CONF_PORT]
     port_ipv6 = entry.options[CONF_PORT_IPV6]
     entities = []
     if entry.data[CONF_IPV4]:
-        entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
+        entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
     if entry.data[CONF_IPV6]:
-        entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))
+        entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
     async_add_entities(entities, update_before_add=True)
@@ -77,13 +77,11 @@ class WanIpSensor(SensorEntity):
     _attr_translation_key = "dnsip"
     _unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})
-    resolver: aiodns.DNSResolver
     def __init__(
         self,
         name: str,
         hostname: str,
-        nameserver: str,
+        resolver: str,
         ipv6: bool,
         port: int,
     ) -> None:
@@ -92,11 +90,11 @@ class WanIpSensor(SensorEntity):
         self._attr_unique_id = f"{hostname}_{ipv6}"
         self.hostname = hostname
         self.port = port
-        self.nameserver = nameserver
+        self._resolver = resolver
         self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
         self._retries = DEFAULT_RETRIES
         self._attr_extra_state_attributes = {
-            "resolver": nameserver,
+            "resolver": resolver,
             "querytype": self.querytype,
         }
         self._attr_device_info = DeviceInfo(
@@ -106,13 +104,13 @@ class WanIpSensor(SensorEntity):
             model=aiodns.__version__,
             name=name,
         )
+        self.resolver: aiodns.DNSResolver
         self.create_dns_resolver()
     def create_dns_resolver(self) -> None:
         """Create the DNS resolver."""
-        self.resolver = aiodns.DNSResolver(
-            nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
-        )
+        self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port)
+        self.resolver.nameservers = [self._resolver]
     async def async_update(self) -> None:
         """Get the current DNS IP address for hostname."""

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
+  "requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]
 }

View File

@@ -2,4 +2,3 @@ raw_get_positions:
   target:
     entity:
       domain: vacuum
-      integration: ecovacs

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.11.2"]
+  "requirements": ["env-canada==0.11.3"]
 }

View File

@@ -10,6 +10,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Filter from a config entry."""
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+    entry.async_on_unload(entry.add_update_listener(update_listener))
     return True
@@ -17,3 +18,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload Filter config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Handle options update."""
+    await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -246,7 +246,6 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""

View File

@@ -1,27 +0,0 @@
"""The Firefly III integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Set up Firefly III from a config entry."""
coordinator = FireflyDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,140 +0,0 @@
"""Config flow for the Firefly III integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_URL): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
vol.Required(CONF_API_KEY): str,
}
)
async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect."""
try:
client = Firefly(
api_url=data[CONF_URL],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await client.get_about()
except FireflyAuthenticationError:
raise InvalidAuth from None
except FireflyConnectionError as err:
raise CannotConnect from err
except FireflyTimeoutError as err:
raise FireflyClientTimeout from err
return True
class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Firefly III."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth when Firefly III API authentication fails."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth: ask for a new API key and validate."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
try:
await _validate_input(
self.hass,
data={
**reauth_entry.data,
CONF_API_KEY: user_input[CONF_API_KEY],
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class FireflyClientTimeout(HomeAssistantError):
"""Error to indicate a timeout occurred."""

View File

@@ -1,6 +0,0 @@
"""Constants for the Firefly III integration."""
DOMAIN = "firefly_iii"
MANUFACTURER = "Firefly III"
NAME = "Firefly III"

View File

@@ -1,137 +0,0 @@
"""Data Update Coordinator for Firefly III integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from aiohttp import CookieJar
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator]
DEFAULT_SCAN_INTERVAL = timedelta(minutes=5)
@dataclass
class FireflyCoordinatorData:
"""Data structure for Firefly III coordinator data."""
accounts: list[Account]
categories: list[Category]
category_details: list[Category]
budgets: list[Budget]
bills: list[Bill]
primary_currency: Currency
class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]):
"""Coordinator to manage data updates for Firefly III integration."""
config_entry: FireflyConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.firefly = Firefly(
api_url=self.config_entry.data[CONF_URL],
api_key=self.config_entry.data[CONF_API_KEY],
session=async_create_clientsession(
self.hass,
self.config_entry.data[CONF_VERIFY_SSL],
cookie_jar=CookieJar(unsafe=True),
),
)
async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
await self.firefly.get_about()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> FireflyCoordinatorData:
"""Fetch data from Firefly III API."""
now = datetime.now()
start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
end_date = now
try:
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets()
bills = await self.firefly.get_bills()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
return FireflyCoordinatorData(
accounts=accounts,
categories=categories,
category_details=category_details,
budgets=budgets,
bills=bills,
primary_currency=primary_currency,
)
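
Note: the module above only defines the coordinator; the integration's __init__.py is not part of this diff. Under the usual Home Assistant pattern the entry setup would look roughly like the sketch below (an assumption, not the file itself), which is what makes entry.runtime_data available to the sensor platform shown further down:

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]  # assumption: sensor is the only platform in this diff

async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
    """Sketch of the standard coordinator wiring (not taken from this diff)."""
    coordinator = FireflyDataUpdateCoordinator(hass, entry)
    # The first refresh runs _async_setup() and _async_update_data(), raising
    # ConfigEntryAuthFailed / ConfigEntryNotReady through the handlers shown above.
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True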

View File

@@ -1,40 +0,0 @@
"""Base entity for Firefly III integration."""
from __future__ import annotations
from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import FireflyDataUpdateCoordinator
class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]):
"""Base class for Firefly III entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize a Firefly entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
configuration_url=URL(coordinator.config_entry.data[CONF_URL]),
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.entity_description.key}",
)
},
)

View File

@@ -1,18 +0,0 @@
{
"entity": {
"sensor": {
"account_type": {
"default": "mdi:bank",
"state": {
"expense": "mdi:cash-minus",
"revenue": "mdi:cash-plus",
"asset": "mdi:account-cash",
"liability": "mdi:hand-coin"
}
},
"category": {
"default": "mdi:label"
}
}
}
}

View File

@@ -1,10 +0,0 @@
{
"domain": "firefly_iii",
"name": "Firefly III",
"codeowners": ["@erwindouna"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/firefly_iii",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.6"]
}

View File

@@ -1,68 +0,0 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
No explicit parallel updates are defined.
reauthentication-flow:
status: todo
comment: |
No reauthentication flow is defined. It will be done in a next iteration.
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -1,133 +0,0 @@
"""Sensor platform for Firefly III integration."""
from __future__ import annotations
from pyfirefly.models import Account, Category
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.components.sensor.const import SensorDeviceClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
from .entity import FireflyBaseEntity
ACCOUNT_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="account_type",
translation_key="account",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
CATEGORY_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="category",
translation_key="category",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: FireflyConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Firefly III sensor platform."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
FireflyAccountEntity(
coordinator=coordinator,
entity_description=description,
account=account,
)
for account in coordinator.data.accounts
for description in ACCOUNT_SENSORS
]
entities.extend(
FireflyCategoryEntity(
coordinator=coordinator,
entity_description=description,
category=category,
)
for category in coordinator.data.category_details
for description in CATEGORY_SENSORS
)
async_add_entities(entities)
class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III account."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
account: Account,
) -> None:
"""Initialize Firefly account entity."""
super().__init__(coordinator, entity_description)
self._account = account
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{account.id}"
self._attr_name = account.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
# Account type state doesn't go well with the icons.json. Need to fix it.
if account.attributes.type == "expense":
self._attr_icon = "mdi:cash-minus"
elif account.attributes.type == "asset":
self._attr_icon = "mdi:account-cash"
elif account.attributes.type == "revenue":
self._attr_icon = "mdi:cash-plus"
elif account.attributes.type == "liability":
self._attr_icon = "mdi:hand-coin"
else:
self._attr_icon = "mdi:bank"
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self._account.attributes.current_balance
class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III category."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
category: Category,
) -> None:
"""Initialize Firefly category entity."""
super().__init__(coordinator, entity_description)
self._category = category
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{category.id}"
self._attr_name = category.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
spent_items = self._category.attributes.spent or []
earned_items = self._category.attributes.earned or []
spent = sum(float(item.sum) for item in spent_items if item.sum is not None)
earned = sum(float(item.sum) for item in earned_items if item.sum is not None)
if spent == 0 and earned == 0:
return None
return spent + earned
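
Note: the category sensor nets the month's spent and earned sums and returns None when both are zero; the plain addition only nets out because spent amounts come back from the API as negative values. A standalone illustration of that arithmetic with made-up numbers (plain Python, not integration code):

spent_sums = ["-12.50", "-7.25"]  # example values; spent entries are negative
earned_sums = ["20.00"]

spent = sum(float(s) for s in spent_sums if s is not None)
earned = sum(float(s) for s in earned_sums if s is not None)

balance = None if spent == 0 and earned == 0 else spent + earned
print(balance)  # 0.25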

View File

@@ -1,49 +0,0 @@
{
"config": {
"step": {
"user": {
"data": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "[%key:common::config_flow::data::api_key%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "The API key for authenticating with Firefly",
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
},
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "The new API access token for authenticating with Firefly III"
},
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while trying to connect to the Firefly instance: {error}"
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
}
}
}

View File

@@ -452,10 +452,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     hass.http.app.router.register_resource(IndexView(repo_path, hass))
-    async_register_built_in_panel(hass, "light")
-    async_register_built_in_panel(hass, "security")
-    async_register_built_in_panel(hass, "climate")
     async_register_built_in_panel(hass, "profile")
     async_register_built_in_panel(
@@ -463,7 +459,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         "developer-tools",
         require_admin=True,
         sidebar_title="developer_tools",
-        sidebar_icon="mdi:hammer",
+        sidebar_icon="hass:hammer",
     )
 @callback

View File

@@ -20,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20251001.0"]
+  "requirements": ["home-assistant-frontend==20251001.2"]
 }

View File

@@ -1,10 +1,8 @@
 load_url:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
     url:
       example: "https://home-assistant.io"
       required: true
@@ -12,12 +10,10 @@ load_url:
         text:
 set_config:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
    key:
      example: "motionSensitivity"
      required: true
@@ -30,14 +26,12 @@ set_config:
         text:
 start_application:
+  target:
+    device:
+      integration: fully_kiosk
   fields:
     application:
       example: "de.ozerov.fully"
       required: true
       selector:
         text:
-    device_id:
-      required: true
-      selector:
-        device:
-          integration: fully_kiosk
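
Note: on the side with the target block the device is addressed through the standard service-call target, while the side with the device_id field passes the device as ordinary service data. A hedged sketch of the two call styles from Python (service and field names come from this diff; the device id is a placeholder):

async def _demo(hass):
    # target-style definition: the device goes into the call's target
    await hass.services.async_call(
        "fully_kiosk",
        "load_url",
        {"url": "https://home-assistant.io"},
        target={"device_id": "abc123"},  # placeholder device id
        blocking=True,
    )

    # device_id-field definition: the device id is ordinary service data
    await hass.services.async_call(
        "fully_kiosk",
        "load_url",
        {"url": "https://home-assistant.io", "device_id": "abc123"},
        blocking=True,
    )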

View File

@@ -147,10 +147,6 @@
       "name": "Load URL",
       "description": "Loads a URL on Fully Kiosk Browser.",
       "fields": {
-        "device_id": {
-          "name": "Device ID",
-          "description": "The target device for this action."
-        },
         "url": {
           "name": "[%key:common::config_flow::data::url%]",
           "description": "URL to load."
@@ -161,10 +157,6 @@
       "name": "Set configuration",
       "description": "Sets a configuration parameter on Fully Kiosk Browser.",
       "fields": {
-        "device_id": {
-          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
-          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
-        },
         "key": {
           "name": "Key",
           "description": "Configuration parameter to set."
@@ -182,10 +174,6 @@
         "application": {
           "name": "Application",
           "description": "Package name of the application to start."
-        },
-        "device_id": {
-          "name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
-          "description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
         }
       }
     }

View File

@@ -108,7 +108,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
         )
-        hass.config_entries.async_schedule_reload(entry.entry_id)
     entry.async_on_unload(
         # We use async_handle_source_entity_changes to track changes to the humidifer,
@@ -141,7 +140,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_SENSOR: data["entity_id"]},
         )
-        hass.config_entries.async_schedule_reload(entry.entry_id)
     entry.async_on_unload(
         async_track_entity_registry_updated_event(
@@ -150,6 +148,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )
     await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
+    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True
@@ -187,6 +186,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     return True
+async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Update listener, called when the config entry options are changed."""
+    await hass.config_entries.async_reload(entry.entry_id)
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(
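
Note: this hunk (and the matching generic_thermostat and group hunks below) trades the options_flow_reloads shortcut for an explicit update listener, so the entry reloads whenever its options change. A minimal, generic sketch of that pattern (not taken from any of these files):

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

async def _reload_on_update(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Reload the config entry whenever it is updated."""
    await hass.config_entries.async_reload(entry.entry_id)

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # async_on_unload removes the listener again when the entry unloads.
    entry.async_on_unload(entry.add_update_listener(_reload_on_update))
    return True

# Any later update, e.g. an options flow saving new options, fires the listener:
# hass.config_entries.async_update_entry(entry, options={**entry.options, "some_option": True})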

View File

@@ -96,7 +96,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""

View File

@@ -35,7 +35,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_HEATER: source_entity_id},
         )
-        hass.config_entries.async_schedule_reload(entry.entry_id)
     entry.async_on_unload(
         # We use async_handle_source_entity_changes to track changes to the heater, but
@@ -68,7 +67,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry,
             options={**entry.options, CONF_SENSOR: data["entity_id"]},
         )
-        hass.config_entries.async_schedule_reload(entry.entry_id)
     entry.async_on_unload(
         async_track_entity_registry_updated_event(
@@ -77,6 +75,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     )
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
+    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True
@@ -114,6 +113,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
     return True
+async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Update listener, called when the config entry options are changed."""
+    await hass.config_entries.async_reload(entry.entry_id)
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -104,7 +104,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
         """Return config entry title."""

View File

@@ -76,10 +76,6 @@ async def async_unload_entry(
     hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
 ) -> bool:
     """Unload a config entry."""
-    if not hass.config_entries.async_loaded_entries(DOMAIN):
-        for service_name in hass.services.async_services_for_domain(DOMAIN):
-            hass.services.async_remove(DOMAIN, service_name)
     conversation.async_unset_agent(hass, entry)
     return True

View File

@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import HomeAssistantError
+from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
 from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
 from homeassistant.helpers.event import async_call_later
@@ -68,7 +68,13 @@ async def async_send_text_commands(
 ) -> list[CommandResponse]:
     """Send text commands to Google Assistant Service."""
     # There can only be 1 entry (config_flow has single_instance_allowed)
-    entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]
+    entries = hass.config_entries.async_loaded_entries(DOMAIN)
+    if not entries:
+        raise ServiceValidationError(
+            translation_domain=DOMAIN,
+            translation_key="entry_not_loaded",
+        )
+    entry: GoogleAssistantSDKConfigEntry = entries[0]
     session = entry.runtime_data.session
     try:
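
Note: with this guard, a service call made while no entry is loaded now raises a translated ServiceValidationError instead of failing with an IndexError. A hedged sketch of what a caller would see; the service name send_text_command and its command field are assumptions, not shown in this diff:

from homeassistant.exceptions import ServiceValidationError

async def _demo(hass):
    try:
        await hass.services.async_call(
            "google_assistant_sdk",
            "send_text_command",  # assumed service name
            {"command": "turn on the hallway light"},  # assumed field name
            blocking=True,
        )
    except ServiceValidationError as err:
        # Resolves to the "entry_not_loaded" message added to strings.json below
        print(err)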

View File

@@ -1,4 +1,4 @@
-"""Support for Google Assistant SDK."""
+"""Services for the Google Assistant SDK integration."""
 from __future__ import annotations

View File

@@ -65,6 +65,9 @@
     }
   },
   "exceptions": {
+    "entry_not_loaded": {
+      "message": "Entry not loaded"
+    },
     "grpc_error": {
       "message": "Failed to communicate with Google Assistant"
     }

View File

@@ -456,6 +456,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
         """Initialize the agent."""
         self.entry = entry
         self.subentry = subentry
+        self.default_model = default_model
         self._attr_name = subentry.title
         self._genai_client = entry.runtime_data
         self._attr_unique_id = subentry.subentry_id
@@ -489,7 +490,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
             tools = tools or []
             tools.append(Tool(google_search=GoogleSearch()))
-        model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+        model_name = options.get(CONF_CHAT_MODEL, self.default_model)
         # Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
         supports_system_instruction = (
             "gemma" not in model_name
@@ -620,7 +621,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
     def create_generate_content_config(self) -> GenerateContentConfig:
         """Create the GenerateContentConfig for the LLM."""
         options = self.subentry.data
-        model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+        model = options.get(CONF_CHAT_MODEL, self.default_model)
         thinking_config: ThinkingConfig | None = None
         if model.startswith("models/gemini-2.5") and not model.endswith(
             ("tts", "image", "image-preview")

View File

@@ -1,5 +1,7 @@
 set_vacation:
   target:
+    device:
+      integration: google_mail
     entity:
       integration: google_mail
   fields:

View File

@@ -141,9 +141,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     await hass.config_entries.async_forward_entry_setups(
         entry, (entry.options["group_type"],)
     )
+    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
     return True
+async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
+    """Update listener, called when the config entry options are changed."""
+    await hass.config_entries.async_reload(entry.entry_id)
 async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(

View File

@@ -329,7 +329,6 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
     config_flow = CONFIG_FLOW
     options_flow = OPTIONS_FLOW
-    options_flow_reloads = True
     @callback
     def async_config_entry_title(self, options: Mapping[str, Any]) -> str:

View File

@@ -1,18 +1,14 @@
 """The Growatt server PV inverter sensor integration."""
 from collections.abc import Mapping
-import logging
 import growattServer
-from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
+from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
+from homeassistant.exceptions import ConfigEntryError
 from .const import (
-    AUTH_API_TOKEN,
-    AUTH_PASSWORD,
-    CONF_AUTH_TYPE,
     CONF_PLANT_ID,
     DEFAULT_PLANT_ID,
     DEFAULT_URL,
@@ -23,110 +19,36 @@ from .const import (
 from .coordinator import GrowattConfigEntry, GrowattCoordinator
 from .models import GrowattRuntimeData
-_LOGGER = logging.getLogger(__name__)
-def get_device_list_classic(
+def get_device_list(
     api: growattServer.GrowattApi, config: Mapping[str, str]
 ) -> tuple[list[dict[str, str]], str]:
     """Retrieve the device list for the selected plant."""
     plant_id = config[CONF_PLANT_ID]
     # Log in to api and fetch first plant if no plant id is defined.
-    try:
-        login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
-        # DEBUG: Log the actual response structure
-    except Exception as ex:
-        _LOGGER.error("DEBUG - Login response: %s", login_response)
-        raise ConfigEntryError(
-            f"Error communicating with Growatt API during login: {ex}"
-        ) from ex
-    if not login_response.get("success"):
-        msg = login_response.get("msg", "Unknown error")
-        _LOGGER.debug("Growatt login failed: %s", msg)
-        if msg == LOGIN_INVALID_AUTH_CODE:
-            raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
-        raise ConfigEntryError(f"Growatt login failed: {msg}")
+    login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
+    if (
+        not login_response["success"]
+        and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
+    ):
+        raise ConfigEntryError("Username, Password or URL may be incorrect!")
     user_id = login_response["user"]["id"]
     if plant_id == DEFAULT_PLANT_ID:
-        try:
-            plant_info = api.plant_list(user_id)
-        except Exception as ex:
-            raise ConfigEntryError(
-                f"Error communicating with Growatt API during plant list: {ex}"
-            ) from ex
-        if not plant_info or "data" not in plant_info or not plant_info["data"]:
-            raise ConfigEntryError("No plants found for this account.")
+        plant_info = api.plant_list(user_id)
         plant_id = plant_info["data"][0]["plantId"]
     # Get a list of devices for specified plant to add sensors for.
-    try:
-        devices = api.device_list(plant_id)
-    except Exception as ex:
-        raise ConfigEntryError(
-            f"Error communicating with Growatt API during device list: {ex}"
-        ) from ex
+    devices = api.device_list(plant_id)
     return devices, plant_id
-def get_device_list_v1(
-    api, config: Mapping[str, str]
-) -> tuple[list[dict[str, str]], str]:
-    """Device list logic for Open API V1.
-    Note: Plant selection (including auto-selection if only one plant exists)
-    is handled in the config flow before this function is called. This function
-    only fetches devices for the already-selected plant_id.
-    """
-    plant_id = config[CONF_PLANT_ID]
-    try:
-        devices_dict = api.device_list(plant_id)
-    except growattServer.GrowattV1ApiError as e:
-        raise ConfigEntryError(
-            f"API error during device list: {e} (Code: {getattr(e, 'error_code', None)}, Message: {getattr(e, 'error_msg', None)})"
-        ) from e
-    devices = devices_dict.get("devices", [])
-    # Only MIN device (type = 7) support implemented in current V1 API
-    supported_devices = [
-        {
-            "deviceSn": device.get("device_sn", ""),
-            "deviceType": "min",
-        }
-        for device in devices
-        if device.get("type") == 7
-    ]
-    for device in devices:
-        if device.get("type") != 7:
-            _LOGGER.warning(
-                "Device %s with type %s not supported in Open API V1, skipping",
-                device.get("device_sn", ""),
-                device.get("type"),
-            )
-    return supported_devices, plant_id
-def get_device_list(
-    api, config: Mapping[str, str], api_version: str
-) -> tuple[list[dict[str, str]], str]:
-    """Dispatch to correct device list logic based on API version."""
-    if api_version == "v1":
-        return get_device_list_v1(api, config)
-    if api_version == "classic":
-        return get_device_list_classic(api, config)
-    raise ConfigEntryError(f"Unknown API version: {api_version}")
 async def async_setup_entry(
     hass: HomeAssistant, config_entry: GrowattConfigEntry
 ) -> bool:
     """Set up Growatt from a config entry."""
     config = config_entry.data
+    username = config[CONF_USERNAME]
     url = config.get(CONF_URL, DEFAULT_URL)
     # If the URL has been deprecated then change to the default instead
@@ -136,24 +58,11 @@ async def async_setup_entry(
         new_data[CONF_URL] = url
         hass.config_entries.async_update_entry(config_entry, data=new_data)
-    # Determine API version
-    if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
-        api_version = "v1"
-        token = config[CONF_TOKEN]
-        api = growattServer.OpenApiV1(token=token)
-    elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
-        api_version = "classic"
-        username = config[CONF_USERNAME]
-        api = growattServer.GrowattApi(
-            add_random_user_id=True, agent_identifier=username
-        )
-        api.server_url = url
-    else:
-        raise ConfigEntryError("Unknown authentication type in config entry.")
-    devices, plant_id = await hass.async_add_executor_job(
-        get_device_list, api, config, api_version
-    )
+    # Initialise the library with the username & a random id each time it is started
+    api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
+    api.server_url = url
+    devices, plant_id = await hass.async_add_executor_job(get_device_list, api, config)
     # Create a coordinator for the total sensors
     total_coordinator = GrowattCoordinator(
@@ -166,7 +75,7 @@ async def async_setup_entry(
             hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
         )
         for device in devices
-        if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
+        if device["deviceType"] in ["inverter", "tlx", "storage", "mix"]
     }
     # Perform the first refresh for the total coordinator
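
Note: everything the restored classic path needs from growattServer is synchronous, which is why the device-list call runs through hass.async_add_executor_job. A standalone sketch of the same flow outside Home Assistant, using only the library calls visible in this diff (credentials, URL and ids are placeholders):

import growattServer

username = "example-user"        # placeholder
password = "example-password"    # placeholder

api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
api.server_url = "https://server.growatt.com/"  # assumed server URL; the integration uses DEFAULT_URL

login_response = api.login(username, password)
if not login_response["success"]:
    raise SystemExit(f"Login failed: {login_response['msg']}")

user_id = login_response["user"]["id"]
plant_id = api.plant_list(user_id)["data"][0]["plantId"]  # first plant, like the DEFAULT_PLANT_ID handling
devices = api.device_list(plant_id)
print(plant_id, [device["deviceType"] for device in devices])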

View File

@@ -1,38 +1,22 @@
 """Config flow for growatt server integration."""
-import logging
 from typing import Any
 import growattServer
-import requests
 import voluptuous as vol
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import (
-    CONF_NAME,
-    CONF_PASSWORD,
-    CONF_TOKEN,
-    CONF_URL,
-    CONF_USERNAME,
-)
+from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
 from homeassistant.core import callback
 from .const import (
-    ABORT_NO_PLANTS,
-    AUTH_API_TOKEN,
-    AUTH_PASSWORD,
-    CONF_AUTH_TYPE,
     CONF_PLANT_ID,
     DEFAULT_URL,
     DOMAIN,
-    ERROR_CANNOT_CONNECT,
-    ERROR_INVALID_AUTH,
     LOGIN_INVALID_AUTH_CODE,
     SERVER_URLS,
 )
-_LOGGER = logging.getLogger(__name__)
 class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
     """Config flow class."""
@@ -43,98 +27,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialise growatt server flow."""
-        self.user_id: str | None = None
+        self.user_id = None
         self.data: dict[str, Any] = {}
-        self.auth_type: str | None = None
-        self.plants: list[dict[str, Any]] = []
-    async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle the start of the config flow."""
-        return self.async_show_menu(
-            step_id="user",
-            menu_options=["password_auth", "token_auth"],
-        )
-    async def async_step_password_auth(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle username/password authentication."""
-        if user_input is None:
-            return self._async_show_password_form()
-        self.auth_type = AUTH_PASSWORD
-        # Traditional username/password authentication
-        self.api = growattServer.GrowattApi(
-            add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
-        )
-        self.api.server_url = user_input[CONF_URL]
-        try:
-            login_response = await self.hass.async_add_executor_job(
-                self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
-            )
-        except requests.exceptions.RequestException as ex:
-            _LOGGER.error("Network error during Growatt API login: %s", ex)
-            return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
-        except (ValueError, KeyError, TypeError, AttributeError) as ex:
-            _LOGGER.error("Invalid response format during login: %s", ex)
-            return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
-        if (
-            not login_response["success"]
-            and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
-        ):
-            return self._async_show_password_form({"base": ERROR_INVALID_AUTH})
-        self.user_id = login_response["user"]["id"]
-        self.data = user_input
-        self.data[CONF_AUTH_TYPE] = self.auth_type
-        return await self.async_step_plant()
-    async def async_step_token_auth(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle API token authentication."""
-        if user_input is None:
-            return self._async_show_token_form()
-        self.auth_type = AUTH_API_TOKEN
-        # Using token authentication
-        token = user_input[CONF_TOKEN]
-        self.api = growattServer.OpenApiV1(token=token)
-        # Verify token by fetching plant list
-        try:
-            plant_response = await self.hass.async_add_executor_job(self.api.plant_list)
-            self.plants = plant_response.get("plants", [])
-        except requests.exceptions.RequestException as ex:
-            _LOGGER.error("Network error during Growatt V1 API plant list: %s", ex)
-            return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
-        except growattServer.GrowattV1ApiError as e:
-            _LOGGER.error(
-                "Growatt V1 API error: %s (Code: %s)",
-                e.error_msg or str(e),
-                getattr(e, "error_code", None),
-            )
-            return self._async_show_token_form({"base": ERROR_INVALID_AUTH})
-        except (ValueError, KeyError, TypeError, AttributeError) as ex:
-            _LOGGER.error(
-                "Invalid response format during Growatt V1 API plant list: %s", ex
-            )
-            return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
-        self.data = user_input
-        self.data[CONF_AUTH_TYPE] = self.auth_type
-        return await self.async_step_plant()
     @callback
-    def _async_show_password_form(
-        self, errors: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Show the username/password form to the user."""
+    def _async_show_user_form(self, errors=None):
+        """Show the form to the user."""
         data_schema = vol.Schema(
             {
                 vol.Required(CONF_USERNAME): str,
@@ -144,87 +42,58 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
         )
         return self.async_show_form(
-            step_id="password_auth", data_schema=data_schema, errors=errors
+            step_id="user", data_schema=data_schema, errors=errors
         )
-    @callback
-    def _async_show_token_form(
-        self, errors: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Show the API token form to the user."""
-        data_schema = vol.Schema(
-            {
-                vol.Required(CONF_TOKEN): str,
-            }
-        )
-        return self.async_show_form(
-            step_id="token_auth",
-            data_schema=data_schema,
-            errors=errors,
-        )
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle the start of the config flow."""
+        if not user_input:
+            return self._async_show_user_form()
+        # Initialise the library with the username & a random id each time it is started
+        self.api = growattServer.GrowattApi(
+            add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
+        )
+        self.api.server_url = user_input[CONF_URL]
+        login_response = await self.hass.async_add_executor_job(
+            self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
+        )
+        if (
+            not login_response["success"]
+            and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
+        ):
+            return self._async_show_user_form({"base": "invalid_auth"})
+        self.user_id = login_response["user"]["id"]
+        self.data = user_input
+        return await self.async_step_plant()
     async def async_step_plant(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle adding a "plant" to Home Assistant."""
-        if self.auth_type == AUTH_API_TOKEN:
-            # Using V1 API with token
-            if not self.plants:
-                return self.async_abort(reason=ABORT_NO_PLANTS)
-            # Create dictionary of plant_id -> name
-            plant_dict = {
-                str(plant["plant_id"]): plant.get("name", "Unknown Plant")
-                for plant in self.plants
-            }
-            if user_input is None and len(plant_dict) > 1:
-                data_schema = vol.Schema(
-                    {vol.Required(CONF_PLANT_ID): vol.In(plant_dict)}
-                )
-                return self.async_show_form(step_id="plant", data_schema=data_schema)
-            if user_input is None:
-                # Single plant => mark it as selected
-                user_input = {CONF_PLANT_ID: list(plant_dict.keys())[0]}
-            user_input[CONF_NAME] = plant_dict[user_input[CONF_PLANT_ID]]
-        else:
-            # Traditional API
-            try:
-                plant_info = await self.hass.async_add_executor_job(
-                    self.api.plant_list, self.user_id
-                )
-            except requests.exceptions.RequestException as ex:
-                _LOGGER.error("Network error during Growatt API plant list: %s", ex)
-                return self.async_abort(reason=ERROR_CANNOT_CONNECT)
-            # Access plant_info["data"] - validate response structure
-            if not isinstance(plant_info, dict) or "data" not in plant_info:
-                _LOGGER.error(
-                    "Invalid response format during plant list: missing 'data' key"
-                )
-                return self.async_abort(reason=ERROR_CANNOT_CONNECT)
-            plant_data = plant_info["data"]
-            if not plant_data:
-                return self.async_abort(reason=ABORT_NO_PLANTS)
-            plants = {plant["plantId"]: plant["plantName"] for plant in plant_data}
-            if user_input is None and len(plant_data) > 1:
-                data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})
-                return self.async_show_form(step_id="plant", data_schema=data_schema)
-            if user_input is None:
-                # single plant => mark it as selected
-                user_input = {CONF_PLANT_ID: plant_data[0]["plantId"]}
-            user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
+        plant_info = await self.hass.async_add_executor_job(
+            self.api.plant_list, self.user_id
+        )
+        if not plant_info["data"]:
+            return self.async_abort(reason="no_plants")
+        plants = {plant["plantId"]: plant["plantName"] for plant in plant_info["data"]}
+        if user_input is None and len(plant_info["data"]) > 1:
+            data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})
+            return self.async_show_form(step_id="plant", data_schema=data_schema)
+        if user_input is None:
+            # single plant => mark it as selected
+            user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]}
+        user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
         await self.async_set_unique_id(user_input[CONF_PLANT_ID])
         self._abort_if_unique_id_configured()
         self.data.update(user_input)

View File

@@ -4,16 +4,6 @@ from homeassistant.const import Platform
 CONF_PLANT_ID = "plant_id"
-# API key support
-CONF_API_KEY = "api_key"
-# Auth types for config flow
-AUTH_PASSWORD = "password"
-AUTH_API_TOKEN = "api_token"
-CONF_AUTH_TYPE = "auth_type"
-DEFAULT_AUTH_TYPE = AUTH_PASSWORD
 DEFAULT_PLANT_ID = "0"
 DEFAULT_NAME = "Growatt"
@@ -39,10 +29,3 @@ DOMAIN = "growatt_server"
 PLATFORMS = [Platform.SENSOR]
 LOGIN_INVALID_AUTH_CODE = "502"
-# Config flow error types (also used as abort reasons)
-ERROR_CANNOT_CONNECT = "cannot_connect"  # Used for both form errors and aborts
-ERROR_INVALID_AUTH = "invalid_auth"
-# Config flow abort reasons
-ABORT_NO_PLANTS = "no_plants"

View File

@@ -1,7 +1,5 @@
 """Coordinator module for managing Growatt data fetching."""
-from __future__ import annotations
 import datetime
 import json
 import logging
@@ -40,30 +38,22 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         plant_id: str,
     ) -> None:
         """Initialize the coordinator."""
-        self.api_version = (
-            "v1" if config_entry.data.get("auth_type") == "api_token" else "classic"
-        )
+        self.username = config_entry.data[CONF_USERNAME]
+        self.password = config_entry.data[CONF_PASSWORD]
+        self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
+        self.api = growattServer.GrowattApi(
+            add_random_user_id=True, agent_identifier=self.username
+        )
+        # Set server URL
+        self.api.server_url = self.url
         self.device_id = device_id
         self.device_type = device_type
         self.plant_id = plant_id
-        self.previous_values: dict[str, Any] = {}
-        if self.api_version == "v1":
-            self.username = None
-            self.password = None
-            self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
-            self.token = config_entry.data["token"]
-            self.api = growattServer.OpenApiV1(token=self.token)
-        elif self.api_version == "classic":
-            self.username = config_entry.data.get(CONF_USERNAME)
-            self.password = config_entry.data[CONF_PASSWORD]
-            self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
-            self.api = growattServer.GrowattApi(
-                add_random_user_id=True, agent_identifier=self.username
-            )
-            self.api.server_url = self.url
-        else:
-            raise ValueError(f"Unknown API version: {self.api_version}")
+        # Initialize previous_values to store historical data
+        self.previous_values: dict[str, Any] = {}
         super().__init__(
             hass,
@@ -77,54 +67,21 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         """Update data via library synchronously."""
         _LOGGER.debug("Updating data for %s (%s)", self.device_id, self.device_type)
-        # login only required for classic API
-        if self.api_version == "classic":
-            self.api.login(self.username, self.password)
+        # Login in to the Growatt server
+        self.api.login(self.username, self.password)
         if self.device_type == "total":
-            if self.api_version == "v1":
-                # The V1 Plant APIs do not provide the same information as the classic plant_info() API
-                # More specifically:
-                # 1. There is no monetary information to be found, so today and lifetime money is not available
-                # 2. There is no nominal power, this is provided by inverter min_energy()
-                # This means, for the total coordinator we can only fetch and map the following:
-                # todayEnergy -> today_energy
-                # totalEnergy -> total_energy
-                # invTodayPpv -> current_power
-                total_info = self.api.plant_energy_overview(self.plant_id)
-                total_info["todayEnergy"] = total_info["today_energy"]
-                total_info["totalEnergy"] = total_info["total_energy"]
-                total_info["invTodayPpv"] = total_info["current_power"]
-            else:
-                # Classic API: use plant_info as before
-                total_info = self.api.plant_info(self.device_id)
-                del total_info["deviceList"]
-                plant_money_text, currency = total_info["plantMoneyText"].split("/")
-                total_info["plantMoneyText"] = plant_money_text
-                total_info["currency"] = currency
-            _LOGGER.debug("Total info for plant %s: %r", self.plant_id, total_info)
+            total_info = self.api.plant_info(self.device_id)
+            del total_info["deviceList"]
+            plant_money_text, currency = total_info["plantMoneyText"].split("/")
+            total_info["plantMoneyText"] = plant_money_text
+            total_info["currency"] = currency
             self.data = total_info
         elif self.device_type == "inverter":
             self.data = self.api.inverter_detail(self.device_id)
-        elif self.device_type == "min":
-            # Open API V1: min device
-            try:
-                min_details = self.api.min_detail(self.device_id)
-                min_settings = self.api.min_settings(self.device_id)
-                min_energy = self.api.min_energy(self.device_id)
-            except growattServer.GrowattV1ApiError as err:
-                _LOGGER.error(
-                    "Error fetching min device data for %s: %s", self.device_id, err
-                )
-                raise UpdateFailed(f"Error fetching min device data: {err}") from err
-            min_info = {**min_details, **min_settings, **min_energy}
-            self.data = min_info
-            _LOGGER.debug("min_info for device %s: %r", self.device_id, min_info)
         elif self.device_type == "tlx":
             tlx_info = self.api.tlx_detail(self.device_id)
             self.data = tlx_info["data"]
-            _LOGGER.debug("tlx_info for device %s: %r", self.device_id, tlx_info)
         elif self.device_type == "storage":
             storage_info_detail = self.api.storage_params(self.device_id)
             storage_energy_overview = self.api.storage_energy_overview(
@@ -188,7 +145,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         return self.data.get("currency")
     def get_data(
-        self, entity_description: GrowattSensorEntityDescription
+        self, entity_description: "GrowattSensorEntityDescription"
     ) -> str | int | float | None:
         """Get the data."""
         variable = entity_description.api_key
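
Note: for the "total" device the restored classic code derives a currency from plantMoneyText by splitting on the slash. A small illustration with a made-up payload value:

total_info = {"plantMoneyText": "137.9/€"}  # example value, not real API output

plant_money_text, currency = total_info["plantMoneyText"].split("/")
total_info["plantMoneyText"] = plant_money_text
total_info["currency"] = currency
print(total_info)  # {'plantMoneyText': '137.9', 'currency': '€'}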

View File

@@ -51,7 +51,7 @@ async def async_setup_entry(
     sensor_descriptions: list = []
     if device_coordinator.device_type == "inverter":
         sensor_descriptions = list(INVERTER_SENSOR_TYPES)
-    elif device_coordinator.device_type in ("tlx", "min"):
+    elif device_coordinator.device_type == "tlx":
         sensor_descriptions = list(TLX_SENSOR_TYPES)
     elif device_coordinator.device_type == "storage":
         sensor_descriptions = list(STORAGE_SENSOR_TYPES)

View File

@@ -2,42 +2,26 @@
   "config": {
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "no_plants": "No plants have been found on this account"
    },
    "error": {
-      "invalid_auth": "Authentication failed. Please check your credentials and try again.",
-      "cannot_connect": "Cannot connect to Growatt servers. Please check your internet connection and try again."
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
    },
    "step": {
-      "user": {
-        "title": "Choose authentication method",
-        "description": "Note: API Token authentication is currently only supported for MIN/TLX devices. For other device types, please use Username & Password authentication.",
-        "menu_options": {
-          "password_auth": "Username & Password",
-          "token_auth": "API Token (MIN/TLX only)"
-        }
-      },
-      "password_auth": {
-        "title": "Enter your Growatt login credentials",
-        "data": {
-          "username": "[%key:common::config_flow::data::username%]",
-          "password": "[%key:common::config_flow::data::password%]",
-          "url": "[%key:common::config_flow::data::url%]"
-        }
-      },
-      "token_auth": {
-        "title": "Enter your API token",
-        "description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
-        "data": {
-          "token": "API Token"
-        }
-      },
      "plant": {
        "data": {
          "plant_id": "Plant"
        },
        "title": "Select your plant"
+      },
+      "user": {
+        "data": {
+          "name": "[%key:common::config_flow::data::name%]",
+          "password": "[%key:common::config_flow::data::password%]",
+          "username": "[%key:common::config_flow::data::username%]",
+          "url": "[%key:common::config_flow::data::url%]"
+        },
+        "title": "Enter your Growatt information"
      }
    }
  },

View File

@@ -4,14 +4,9 @@ from uuid import UUID
 from habiticalib import Habitica
-from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
 from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import (
-    config_validation as cv,
-    device_registry as dr,
-    entity_registry as er,
-)
+from homeassistant.helpers import config_validation as cv, device_registry as dr
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey
@@ -32,7 +27,6 @@
     Platform.BUTTON,
     Platform.CALENDAR,
     Platform.IMAGE,
-    Platform.NOTIFY,
     Platform.SENSOR,
     Platform.SWITCH,
     Platform.TODO,
@@ -52,7 +46,6 @@
     """Set up habitica from a config entry."""
     party_added_by_this_entry: UUID | None = None
     device_reg = dr.async_get(hass)
-    entity_registry = er.async_get(hass)
     session = async_get_clientsession(
         hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -103,15 +96,6 @@ async def async_setup_entry(
                 device.id, remove_config_entry_id=config_entry.entry_id
             )
-        notify_entities = [
-            entry.entity_id
-            for entry in entity_registry.entities.values()
-            if entry.domain == NOTIFY_DOMAIN
-            and entry.config_entry_id == config_entry.entry_id
-        ]
-        for entity_id in notify_entities:
-            entity_registry.async_remove(entity_id)
         hass.config_entries.async_schedule_reload(config_entry.entry_id)
     coordinator.async_add_listener(_party_update_listener)

View File

@@ -121,4 +121,4 @@ class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
     @property
     def is_on(self) -> bool | None:
         """If the binary sensor is on."""
-        return self.coordinator.data.party.quest.active
+        return self.coordinator.data.quest.active

View File

@@ -9,7 +9,6 @@ from datetime import timedelta
 from io import BytesIO
 import logging
 from typing import Any
-from uuid import UUID
 from aiohttp import ClientError
 from habiticalib import (
@@ -49,14 +48,6 @@
     tasks: list[TaskData]
-@dataclass
-class HabiticaPartyData:
-    """Habitica party data."""
-    party: GroupData
-    members: dict[UUID, UserData]
 type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]
@@ -201,19 +192,11 @@
         return png.getvalue()
-class HabiticaPartyCoordinator(HabiticaBaseCoordinator[HabiticaPartyData]):
+class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
     """Habitica Party Coordinator."""
     _update_interval = timedelta(minutes=15)
-    async def _update_data(self) -> HabiticaPartyData:
+    async def _update_data(self) -> GroupData:
         """Fetch the latest party data."""
-        return HabiticaPartyData(
-            party=(await self.habitica.get_group()).data,
-            members={
-                member.id: member
-                for member in (await self.habitica.get_group_members()).data
-                if member.id
-            },
-        )
+        return (await self.habitica.get_group()).data
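
Note: after this change the party coordinator's data is the GroupData object itself rather than a HabiticaPartyData wrapper, so consumers drop one attribute hop (as the habitica binary_sensor, entity and image hunks around this one show). A minimal sketch of the difference using stand-in classes, not habiticalib itself:

from dataclasses import dataclass

@dataclass
class Quest:  # stand-in for the quest data on habiticalib's GroupData
    active: bool

@dataclass
class GroupData:  # stand-in for habiticalib.GroupData
    summary: str
    quest: Quest

party = GroupData(summary="The Example Party", quest=Quest(active=True))

# Before: coordinator.data.party.quest.active  (data was a HabiticaPartyData wrapper)
# After:  coordinator.data.quest.active        (data is the GroupData itself)
print(party.quest.active)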

View File

@@ -68,14 +68,14 @@ class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
         super().__init__(coordinator)
         if TYPE_CHECKING:
             assert config_entry.unique_id
-        unique_id = f"{config_entry.unique_id}_{coordinator.data.party.id!s}"
+        unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
         self.entity_description = entity_description
         self._attr_unique_id = f"{unique_id}_{entity_description.key}"
         self._attr_device_info = DeviceInfo(
             entry_type=DeviceEntryType.SERVICE,
             manufacturer=MANUFACTURER,
             model=NAME,
-            name=coordinator.data.party.summary,
+            name=coordinator.data.summary,
             identifiers={(DOMAIN, unique_id)},
             via_device=(DOMAIN, config_entry.unique_id),
         )

View File

@@ -174,9 +174,6 @@
       },
       "collected_items": {
        "default": "mdi:sack"
-      },
-      "last_checkin": {
-        "default": "mdi:login-variant"
      }
    },
    "switch": {
@@ -197,11 +194,6 @@
      "quest_running": {
        "default": "mdi:script-text-play"
      }
-    },
-    "notify": {
-      "party_chat": {
-        "default": "mdi:forum"
-      }
    }
  },
  "services": {

View File

@@ -128,7 +128,7 @@ class HabiticaPartyImage(HabiticaPartyBase, ImageEntity):
         """Return URL of image."""
         return (
             f"{ASSETS_URL}quest_{key}.png"
-            if (key := self.coordinator.data.party.quest.key)
+            if (key := self.coordinator.data.quest.key)
             else None
         )

Some files were not shown because too many files have changed in this diff.