Compare commits


142 Commits

Author SHA1 Message Date
Franck Nijhof
bb98ed6633 2025.10.3 (#154718) 2025-10-17 23:14:01 +02:00
Franck Nijhof
59dace572a Bump version to 2025.10.3 2025-10-17 20:35:30 +00:00
cdnninja
735cf36a5b Bump pyvesync version to 3.1.2 (#154650)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-17 20:34:48 +00:00
tstabrawa
90b0f50b8f Move URL out of Nuheat strings.json (#154580) 2025-10-17 20:34:47 +00:00
Simone Chemelli
e731c07b77 Bump aioamazondevices to 6.4.4 (#154538) 2025-10-17 20:34:46 +00:00
Whitney Young
2c75635e95 OpenUV: Fix update by skipping when protection window is null (#154487) 2025-10-17 20:34:45 +00:00
Anuj Soni
1f031695c2 Move translatable URLs out of strings.json for isy994 (#154464) 2025-10-17 20:34:43 +00:00
Michel van de Wetering
fb279212a9 Add missing long_press entry for trigger_type in strings.json for Hue (#154437) 2025-10-17 20:34:42 +00:00
DannyS95
45869523d0 Move igloohome API access URL into constant placeholders (#154430) 2025-10-17 20:34:41 +00:00
puddly
a753926f22 Use async_schedule_reload instead of async_reload for ZHA (#154397) 2025-10-17 20:34:40 +00:00
Simone Chemelli
dc874ff53a Bump aiocomelit to 1.1.2 (#154393) 2025-10-17 20:34:38 +00:00
Renat Sibgatulin
3ef6865708 Bump aioairq to 0.4.7 (#154386) 2025-10-17 20:34:37 +00:00
Anuj Soni
7f1989f9f2 Move translatable URLs out of strings.json for huawei lte (#154368)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-17 20:34:36 +00:00
epenet
97e338c760 Move URL out of sfr_box strings.json (#154364)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-17 20:34:35 +00:00
wollew
101679c17d update pysqueezebox lib to 0.13.0 (#154358) 2025-10-17 20:34:33 +00:00
tronikos
bc784c356e Bump opower to 0.15.7 (#154351) 2025-10-17 20:34:32 +00:00
J. Nick Koston
556cc57d8b Fix Bluetooth discovery for devices with alternating advertisement names (#154347) 2025-10-17 20:34:31 +00:00
Oliver Gründel
eef6e96a93 Move developer url out of strings.json for coinbase setup flow (#154339) 2025-10-17 20:34:30 +00:00
Shai Ungar
56d237af7f Move URLs out of SABnzbd strings.json (#154333)
Co-authored-by: Claude <noreply@anthropic.com>
2025-10-17 20:34:29 +00:00
Oliver Gründel
e5d1902d2a Move Ecobee authorization URL out of strings.json (#154332) 2025-10-17 20:34:27 +00:00
Yevhenii Vaskivskyi
a9a203678e AsusWRT: Pass only online clients to the device list from the API (#154322) 2025-10-17 20:34:26 +00:00
Mick Vleeshouwer
7f6237cc63 Move URL out of Overkiz Config Flow descriptions (#154315) 2025-10-17 20:34:25 +00:00
Simone Chemelli
5468e691ca Bump aioamazondevices to 6.4.3 (#154293) 2025-10-17 20:34:23 +00:00
Jan-Philipp Benecke
67cbbc3522 Move Electricity Maps url out of strings.json (#154284) 2025-10-17 20:33:17 +00:00
Dan Schafer
504da54c11 Update Snoo strings.json to include weaning_baseline (#154268) 2025-10-17 20:31:13 +00:00
Jordan Harvey
cdda2ef5c8 Bump pyprobeplus to 1.1.0 (#154265) 2025-10-17 20:31:12 +00:00
Jan Bouwhuis
f405f9eb4b Fix HomeWizard total increasing sensors returning 0 (#154264) 2025-10-17 20:31:10 +00:00
Manu
634f71835a Add description placeholders to pyLoad config flow (#154254) 2025-10-17 20:31:09 +00:00
Manu
49bfb01fac Add description placeholders in Uptime Kuma config flow (#154252)
Signed-off-by: tr4nt0r <4445816+tr4nt0r@users.noreply.github.com>
2025-10-17 20:31:08 +00:00
Joakim Plate
ad8f7fdcab Move url like strings to placeholders for nibe (#154249) 2025-10-17 20:31:07 +00:00
J. Nick Koston
f82ec81062 Fix Yale integration to handle unavailable OAuth implementation at startup (#154245) 2025-10-17 20:31:05 +00:00
J. Nick Koston
03b0842a01 Fix August integration to handle unavailable OAuth implementation at startup (#154244) 2025-10-17 20:31:04 +00:00
Christopher Fenner
13e5cb5cc8 Remove URL from ViCare strings.json (#154243) 2025-10-17 20:31:03 +00:00
Shay Levy
f18cdaf4d8 Move URL out of Switcher strings.json (#154240) 2025-10-17 20:31:02 +00:00
Andrew Jackson
5b3bca1426 Move URL out of Mastodon strings.json (#154231) 2025-10-17 20:31:01 +00:00
Andrew Jackson
d812e9d43c Move URL out of Mealie strings.json (#154230) 2025-10-17 20:30:59 +00:00
Simone Chemelli
fa1071b221 Bump aioamazondevices to 6.4.1 (#154228) 2025-10-17 20:30:58 +00:00
Paul Bottein
e48c2c6c0b Bump frontend 20251001.4 (#154218) 2025-10-17 20:23:01 +00:00
Yvan13120
bddd4100c0 Fix state class for Overkiz water consumption (#154164) 2025-10-17 20:23:00 +00:00
srirams
70d8df2e95 Remove redundant state write in Smart Meter Texas (#154126) 2025-10-17 20:22:58 +00:00
Lennart Coopmans
08b3dd0173 PushSafer: Handle empty data section properly (#154109) 2025-10-17 20:22:57 +00:00
Magnus
6723a7c4e1 Bump aioasuswrt to 1.5.1 (#153209) 2025-10-17 20:22:55 +00:00
Franck Nijhof
40d7f2a89e 2025.10.2 (#154181) 2025-10-10 23:19:19 +02:00
Shay Levy
13b717e2da Fix shelly remove orphaned entities (#154182)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-10 22:46:30 +02:00
Franck Nijhof
5fcfd3ad84 Bump version to 2025.10.2 2025-10-10 20:29:17 +00:00
Shay Levy
324a7b5443 Fix Shelly RPC cover update when the device is not initialized (#154159) 2025-10-10 20:27:30 +00:00
Robert Resch
491ae8f72c Bump deebot-client to 15.1.0 (#154154) 2025-10-10 20:23:10 +00:00
Justus
259247892f IOmeter bump version v0.2.0 (#154150) 2025-10-10 20:23:09 +00:00
Bram Kragten
caeda0ef64 Update frontend to 20251001.2 (#154143) 2025-10-10 20:23:08 +00:00
Paul Bottein
df35c535e4 Add missing entity category and icons for smlight integration (#154131) 2025-10-10 20:23:07 +00:00
Paulus Schoutsen
f93b9e0ed0 Z-Wave: ESPHome discovery to update all options (#154113) 2025-10-10 20:23:05 +00:00
peteS-UK
48a3372cf2 Fix for multiple Lyrion Music Server on a single Home Assistant server for Squeezebox (#154081) 2025-10-10 20:23:04 +00:00
Maciej Bieniek
d84fd72428 Bump brother to version 5.1.1 (#154080) 2025-10-10 20:23:03 +00:00
Simone Chemelli
e8cb386962 Bump aioamazondevices to 6.4.0 (#154071) 2025-10-10 20:23:02 +00:00
epenet
5ac726703c Filter out invalid Renault vehicles (#154070)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-10 20:23:00 +00:00
Joost Lekkerkerker
688649a799 Don't mark ZHA coordinator as via_device with itself (#154004) 2025-10-10 20:17:07 +00:00
Artur Pragacz
c5359ade3e Fix empty llm api list in chat log (#153996) 2025-10-10 20:17:05 +00:00
Michael Davie
4e60dedc1b Bump env-canada to 0.11.3 (#153967) 2025-10-10 20:17:04 +00:00
Maciej Bieniek
221d74f83a Fix update interval for AccuWeather hourly forecast (#153957) 2025-10-10 20:17:02 +00:00
G Johansson
fbbb3d6415 Bump holidays to 0.82 (#153952) 2025-10-10 20:17:01 +00:00
Josef Zweck
8297019011 Bump pylamarzocco to 2.1.2 (#153950) 2025-10-10 20:16:59 +00:00
TheJulianJES
61715dcff3 Adjust OTBR config entry name for ZBT-2 (#153940) 2025-10-10 20:16:58 +00:00
TheJulianJES
32b822ee99 Fix HA hardware configuration message for Thread without HAOS (#153933) 2025-10-10 20:16:56 +00:00
Fabien Kleinbourg
e6c2e0ad80 sharkiq dependency bump to 1.4.2 (#153931) 2025-10-10 20:16:55 +00:00
TheJulianJES
1314427dc5 Do not auto-set up ZHA zeroconf discoveries during onboarding (#153914) 2025-10-10 20:16:53 +00:00
Tom Matheussen
bf499a45f7 Add missing translation string for Satel Integra subentry type (#153905) 2025-10-10 20:16:52 +00:00
Christopher Fenner
b955e22628 fix typo in icon assignment of AccuWeather integration (#153890) 2025-10-10 20:16:50 +00:00
Simone Chemelli
1b222ff5fd Fix restore cover state for Comelit SimpleHome (#153887) 2025-10-10 20:16:49 +00:00
derytive
f0510e703f Add plate_count for Miele KM7575 (#153868) 2025-10-10 19:36:03 +00:00
G Johansson
cbe3956e15 Handle timeout errors gracefully in Nord Pool services (#153856) 2025-10-10 19:36:01 +00:00
Aaron Bach
4588e9da8d Limit SimpliSafe websocket connection attempts during startup (#153853)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-10-10 19:36:00 +00:00
Simone Chemelli
5445890fdf Bump aiocomelit to 1.1.1 (#153843) 2025-10-10 19:35:59 +00:00
Simone Chemelli
9b49f77f86 Fix PIN validation for Comelit SimpleHome (#153840) 2025-10-10 19:35:57 +00:00
Petro31
566c8fb786 Fix delay_on and auto_off with multiple triggers (#153839) 2025-10-10 19:35:56 +00:00
Joost Lekkerkerker
b36150c213 Add motion presets to SmartThings AC (#153830) 2025-10-10 19:35:54 +00:00
Joost Lekkerkerker
809070d2ad Catch update exception in AirGradient (#153828) 2025-10-10 19:35:53 +00:00
Joost Lekkerkerker
f4339dc031 Bump pySmartThings to 3.3.1 (#153826) 2025-10-10 19:35:51 +00:00
epenet
f3b37d24b0 Fix Tuya cover position when only control is available (#153803) 2025-10-10 19:35:50 +00:00
Paulus Schoutsen
4c8348caa7 Handle ESPHome discoveries with uninitialized Z-Wave antennas (#153790) 2025-10-10 19:35:49 +00:00
cdnninja
b9e7c102ea vesync correct fan set modes (#153761) 2025-10-10 19:35:47 +00:00
Simone Chemelli
69d9fa89b7 Remove stale entities from Alexa Devices (#153759) 2025-10-10 19:35:46 +00:00
Simone Chemelli
6f3f5a5ec1 Bump aioamazondevices to 6.2.9 (#153756) 2025-10-10 19:35:44 +00:00
Simone Chemelli
5ecfeca90a Fix sensors availability check for Alexa Devices (#153743) 2025-10-10 19:35:43 +00:00
Sander Jochems
00e0570fd4 Upgrade python-melcloud to 0.1.2 (#153742) 2025-10-10 19:35:41 +00:00
Øyvind Matheson Wergeland
5a5b94f3af Synology DSM: Don't reinitialize API during configuration (#153739) 2025-10-10 19:35:40 +00:00
Maciej Bieniek
34f00d9b33 Align Shelly presencezone entity to the new API/firmware (#153737) 2025-10-10 19:35:39 +00:00
Tom
4cabc5b368 Bump airOS to 0.5.5 using formdata for v6 firmware (#153736) 2025-10-10 19:35:37 +00:00
tronikos
4045125422 Fix missing google_assistant_sdk.send_text_command (#153735) 2025-10-10 19:35:36 +00:00
Fredrik Erlandsson
d7393af76f Version bump pydaikin to 2.17.1 (#153726) 2025-10-10 19:35:34 +00:00
Fredrik Erlandsson
ad41386b27 Version bump pydaikin to 2.17.0 (#153718) 2025-10-10 19:35:33 +00:00
tronikos
62d17ea20c Bump opower to 0.15.6 (#153714) 2025-10-10 19:35:31 +00:00
peetersch
c4954731d0 Modbus Fix message_wait_milliseconds is no longer applied (#153709) 2025-10-10 19:35:30 +00:00
cdnninja
647723d3f0 Bump pyvesync to 3.1.0 (#153693) 2025-10-10 19:35:28 +00:00
Christopher Fenner
51c500e22c Fix ViCare pressure sensors missing unit of measurement (#153691) 2025-10-10 19:35:26 +00:00
Denis Shulyaka
f6fc13c1f2 Gemini: Use default model instead of recommended where applicable (#153676) 2025-10-10 19:35:25 +00:00
Jan Bouwhuis
0009a7a042 Fix MQTT Lock state reset to unknown when a reset payload is received (#153647) 2025-10-10 19:35:24 +00:00
Luke Lashley
a3d1aa28e7 Switch Roborock to v4 of the code login api (#153593) 2025-10-10 19:35:22 +00:00
Simone Chemelli
9f53eb9b76 Bump aioamazondevices to 6.2.8 (#153592) 2025-10-10 19:35:21 +00:00
Luke Lashley
f53a205ff3 Bump python-roborock to 2.50.2 (#153561) 2025-10-10 19:35:19 +00:00
NANI
d08517c3df Updated VRM client and accounted for missing forecasts (#153464) 2025-10-10 19:35:18 +00:00
Kinachi249
d7398a44a1 Bump PyCync to 0.4.1 (#153401) 2025-10-10 19:35:17 +00:00
Aidan Timson
9acfc0cb88 Fix power device classes for system bridge (#153201) 2025-10-10 19:35:15 +00:00
Hessel
1b3d21523a Wallbox fix Rate Limit issue for multiple chargers (#153074) 2025-10-10 19:35:14 +00:00
puddly
1d407d1326 Prevent reloading the ZHA integration while adapter firmware is being updated (#152626) 2025-10-10 19:35:12 +00:00
Franck Nijhof
013346cead 2025.10.1 (#153582) 2025-10-03 20:08:44 +02:00
Franck Nijhof
5abaabc9da Bump version to 2025.10.1 2025-10-03 17:26:37 +00:00
Paulus Schoutsen
32481312c3 When discovering a Z-Wave adapter, always configure add-on in config flow (#153575) 2025-10-03 17:26:16 +00:00
Paulus Schoutsen
bdc9eb37d3 Z-Wave to support migrating from USB to socket with same home ID (#153522) 2025-10-03 17:26:15 +00:00
Abílio Costa
e0afcbc02b Debounce updates in Idasen Desk (#153503) 2025-10-03 17:26:13 +00:00
puddly
cd56a6a98d Bump universal-silabs-flasher to 0.0.35 (#153500) 2025-10-03 17:26:11 +00:00
cdnninja
9d85893bbb Fix VeSync zero fan speed handling (#153493)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-10-03 17:26:10 +00:00
starkillerOG
9e8a70225f Bump reolink-aio to 0.16.1 (#153489) 2025-10-03 17:26:08 +00:00
Daniel Hjelseth Høyer
96ec795d5e Bump pyTibber to 0.32.2 (#153484) 2025-10-03 17:26:07 +00:00
Josef Zweck
65b796070d Fix missing parameter pass in onedrive (#153478) 2025-10-03 17:26:05 +00:00
Aidan Timson
32994812e5 Update OVOEnergy to 3.0.1 (#153476) 2025-10-03 17:26:04 +00:00
G Johansson
66ff9d63a3 Fix next event in workday calendar (#153465) 2025-10-03 17:26:02 +00:00
Joost Lekkerkerker
b2a63d4996 Add translation for turbo fan mode in SmartThings (#153445)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-03 17:26:00 +00:00
puddly
f9f37b7f2a Disable baudrate bootloader reset for ZBT-2 (#153443) 2025-10-03 17:25:59 +00:00
Stefan Agner
7bdd9dd38a Update Home Assistant base image to 2025.10.0 (#153441) 2025-10-03 17:25:58 +00:00
Joost Lekkerkerker
1e8aae0a89 Fix missing powerconsumptionreport in Smartthings (#153438) 2025-10-03 17:25:56 +00:00
Aidan Timson
cf668e9dc2 Add missing translation for media browser default title (#153430)
Co-authored-by: Erwin Douna <e.douna@gmail.com>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-10-03 17:25:55 +00:00
Norbert Rittel
2e91c8700f Fix sentence-casing in user-facing strings of slack (#153427) 2025-10-03 17:25:53 +00:00
J. Nick Koston
9d14627daa Bump aiohomekit to 3.2.19 (#153423) 2025-10-03 17:25:52 +00:00
TheJulianJES
73b8283748 Fix Z-Wave RGB light turn on causing rare ZeroDivisionError (#153422) 2025-10-03 17:25:50 +00:00
Manu
edeaaa2e63 Update markdown field description in ntfy integration (#153421) 2025-10-03 17:25:49 +00:00
Tom Matheussen
d26dd8fc39 Fix Satel Integra creating new binary sensors on YAML import (#153419) 2025-10-03 17:25:47 +00:00
Denis Shulyaka
34640ea735 Disable thinking for unsupported gemini models (#153415) 2025-10-03 17:25:46 +00:00
Erwin Douna
46a2e21ef0 Bump pyportainer 1.0.3 (#153413) 2025-10-03 17:25:45 +00:00
Erwin Douna
508af53e72 Bump pyportainer 1.0.2 (#153326) 2025-10-03 17:25:43 +00:00
Josef Zweck
5f7440608c Increase onedrive upload chunk size (#153406) 2025-10-03 17:22:10 +00:00
Michael J. Kidd
0d1aa38a26 Pushover: Handle empty data section properly (#153397) 2025-10-03 17:22:08 +00:00
Luke Lashley
929f8c148a Bump python-roborock to 2.49.1 (#153396) 2025-10-03 17:22:07 +00:00
Joakim Plate
92db1f5a04 Correct blocking update in ToGrill with lack of notifications (#153387) 2025-10-03 17:22:05 +00:00
starkillerOG
e66b5ce0bf Add Roborock mop intensity translations (#153380) 2025-10-03 17:22:03 +00:00
Michael
1e17150e9f Explicit pass in the config entry to coordinator in airtouch4 (#153361)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-03 17:22:02 +00:00
Michael
792902de3d Set config entry to None in ProxmoxVE (#153357) 2025-10-03 17:22:00 +00:00
Andre Lengwenus
04d78c3dd5 Explicitly check for None in raw value processing of modbus (#153352) 2025-10-03 17:21:59 +00:00
G Johansson
5c8d5bfb84 Fix Nord Pool 15 minute interval (#153350) 2025-10-03 17:21:57 +00:00
puddly
99bff31869 Do not reset the adapter twice during ZHA options flow migration (#153345) 2025-10-03 17:21:56 +00:00
Stefan Agner
d949119fb0 Bump aiohasupervisor to 0.3.3 (#153344) 2025-10-03 17:21:54 +00:00
Tom
e7b737ece5 Bump airOS module for alternative login url (#153317) 2025-10-03 17:21:52 +00:00
Tom
fb8ddac2e8 Bump airOS dependency (#153065) 2025-10-03 17:21:51 +00:00
649 changed files with 10282 additions and 32468 deletions

View File

@@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.11"
HA_SHORT_VERSION: "2025.10"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Dependency review
uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
with:
license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
with:
path: venv
fail-on-cache-miss: true

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
category: "/language:python"

View File

@@ -203,7 +203,6 @@ homeassistant.components.feedreader.*
homeassistant.components.file_upload.*
homeassistant.components.filesize.*
homeassistant.components.filter.*
homeassistant.components.firefly_iii.*
homeassistant.components.fitbit.*
homeassistant.components.flexit_bacnet.*
homeassistant.components.flux_led.*
@@ -326,7 +325,6 @@ homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.lovelace.*
homeassistant.components.luftdaten.*
homeassistant.components.lunatone.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mastodon.*

CODEOWNERS (generated)
View File

@@ -492,8 +492,6 @@ build.json @home-assistant/supervisor
/tests/components/filesize/ @gjohansson-ST
/homeassistant/components/filter/ @dgomes
/tests/components/filter/ @dgomes
/homeassistant/components/firefly_iii/ @erwindouna
/tests/components/firefly_iii/ @erwindouna
/homeassistant/components/fireservicerota/ @cyberjunky
/tests/components/fireservicerota/ @cyberjunky
/homeassistant/components/firmata/ @DaAwesomeP
@@ -762,8 +760,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
/homeassistant/components/intesishome/ @jnimmo
/homeassistant/components/iometer/ @MaestroOnICe
/tests/components/iometer/ @MaestroOnICe
/homeassistant/components/iometer/ @jukrebs
/tests/components/iometer/ @jukrebs
/homeassistant/components/ios/ @robbiet480
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -910,8 +908,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/luci/ @mzdrale
/homeassistant/components/luftdaten/ @fabaff @frenck
/tests/components/luftdaten/ @fabaff @frenck
/homeassistant/components/lunatone/ @MoonDevLT
/tests/components/lunatone/ @MoonDevLT
/homeassistant/components/lupusec/ @majuss @suaveolent
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -957,8 +953,6 @@ build.json @home-assistant/supervisor
/tests/components/met_eireann/ @DylanGore
/homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/homeassistant/components/meteo_lt/ @xE1H
/tests/components/meteo_lt/ @xE1H
/homeassistant/components/meteoalarm/ @rolfberkenbosch
/homeassistant/components/meteoclimatic/ @adrianmo
/tests/components/meteoclimatic/ @adrianmo

View File

@@ -616,25 +616,12 @@ async def async_enable_logging(
),
)
logger = logging.getLogger()
logger.setLevel(logging.INFO if verbose else logging.WARNING)
# Log errors to a file if we have write access to file or config dir
if log_file is None:
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
if "SUPERVISOR" in os.environ:
_LOGGER.info("Running in Supervisor, not logging to file")
# Rename the default log file if it exists, since previous versions created
# it even on Supervisor
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
err_log_path = None
else:
err_log_path = default_log_path
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
else:
err_log_path = os.path.abspath(log_file)
if err_log_path:
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)
@@ -648,7 +635,10 @@ async def async_enable_logging(
)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger = logging.getLogger()
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)
# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path

View File

@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
}
UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)

View File

@@ -1,6 +1,9 @@
{
"entity": {
"sensor": {
"air_quality": {
"default": "mdi:air-filter"
},
"cloud_ceiling": {
"default": "mdi:weather-fog"
},
@@ -34,9 +37,6 @@
"thunderstorm_probability_night": {
"default": "mdi:weather-lightning"
},
"translation_key": {
"default": "mdi:air-filter"
},
"tree_pollen": {
"default": "mdi:tree-outline"
},

View File

@@ -1,7 +1,9 @@
"""Airgradient Update platform."""
from datetime import timedelta
import logging
from airgradient import AirGradientConnectionError
from propcache.api import cached_property
from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -13,6 +15,7 @@ from .entity import AirGradientEntity
PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(hours=1)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
@@ -31,6 +34,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Representation of Airgradient Update."""
_attr_device_class = UpdateDeviceClass.FIRMWARE
_server_unreachable_logged = False
def __init__(self, coordinator: AirGradientCoordinator) -> None:
"""Initialize the entity."""
@@ -47,10 +51,27 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Return the installed version of the entity."""
return self.coordinator.data.measures.firmware_version
@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._attr_available
async def async_update(self) -> None:
"""Update the entity."""
try:
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
)
except AirGradientConnectionError:
self._attr_latest_version = None
self._attr_available = False
if not self._server_unreachable_logged:
_LOGGER.error(
"Unable to connect to AirGradient server to check for updates"
)
self._server_unreachable_logged = True
else:
self._server_unreachable_logged = False
self._attr_available = True
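
The hunk above follows a common Home Assistant pattern: when the update server cannot be reached, mark the entity unavailable and emit the error log only once until connectivity returns. A minimal standalone sketch of that pattern, with illustrative class and helper names rather than the integration's actual API:

import logging

_LOGGER = logging.getLogger(__name__)


class LogOnceAvailability:
    """Mark an entity unavailable on errors, logging the failure only once."""

    _attr_available = True
    _error_logged = False

    def _mark_unreachable(self, message: str) -> None:
        # Illustrative helper: flip availability and log only on the first failure
        self._attr_available = False
        if not self._error_logged:
            _LOGGER.error(message)
            self._error_logged = True

    def _mark_reachable(self) -> None:
        # Reset the flag so the next outage is logged again
        self._attr_available = True
        self._error_logged = False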

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -15,7 +14,7 @@ from airos.exceptions import (
)
import voluptuous as vol
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@@ -25,11 +24,6 @@ from homeassistant.const import (
)
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOS8
@@ -60,45 +54,26 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
MINOR_VERSION = 2
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOS8
self.errors: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
self,
user_input: dict[str, Any] | None = None,
) -> ConfigFlowResult:
"""Handle the manual input of host and credentials."""
self.errors = {}
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
validated_info = await self._validate_and_get_device_info(user_input)
if validated_info:
return self.async_create_entry(
title=validated_info["title"],
data=validated_info["data"],
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
)
async def _validate_and_get_device_info(
self, config_data: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate user input with the device API."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
@@ -108,59 +83,21 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
self.errors["base"] = "cannot_connect"
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
self.errors["base"] = "invalid_auth"
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
self.errors["base"] = "key_data_missing"
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"title": airos_data.host.hostname, "data": config_data}
return None
async def async_step_reauth(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm(user_input)
async def async_step_reauth_confirm(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self.errors = {}
if user_input:
validate_data = {**self._get_reauth_entry().data, **user_input}
if await self._validate_and_get_device_info(config_data=validate_data):
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates=validate_data,
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
errors=self.errors,
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

View File

@@ -14,7 +14,7 @@ from airos.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except AirOSConnectionAuthenticationError as err:
except (AirOSConnectionAuthenticationError,) as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryAuthFailed(
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.4"]
"requirements": ["airos==0.5.5"]
}

View File

@@ -2,14 +2,6 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
@@ -42,9 +34,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"entity": {

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioairq"],
"requirements": ["aioairq==0.4.6"]
"requirements": ["aioairq==0.4.7"]
}

View File

@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
URL_API_INTEGRATION = {
"url": "https://dashboard.airthings.com/integrations/api-integration"
}
class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Airthings."""
@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders=URL_API_INTEGRATION,
description_placeholders={
"url": (
"https://dashboard.airthings.com/integrations/api-integration"
),
},
)
errors = {}
@@ -65,8 +65,5 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title="Airthings", data=user_input)
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
description_placeholders=URL_API_INTEGRATION,
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

View File

@@ -4,10 +4,10 @@
"user": {
"data": {
"id": "ID",
"secret": "Secret"
},
"secret": "Secret",
"description": "Login at {url} to find your credentials"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",

View File

@@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="no_devices_found")
titles = {
address: get_name(discovery.device)
address: discovery.device.name
for (address, discovery) in self._discovered_devices.items()
}
return self.async_show_form(

View File

@@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
),
}
PARALLEL_UPDATES = 0
@callback
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:

View File

@@ -6,9 +6,6 @@
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"address": "The Airthings devices discovered via Bluetooth."
}
},
"bluetooth_confirm": {

View File

@@ -18,7 +18,9 @@ from homeassistant.components.binary_sensor import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er
from .const import _LOGGER, DOMAIN
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
@@ -51,11 +53,47 @@ BINARY_SENSORS: Final = (
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
),
),
)
DEPRECATED_BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: False,
),
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -66,6 +104,8 @@ async def async_setup_entry(
coordinator = entry.runtime_data
entity_registry = er.async_get(hass)
# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
@@ -75,6 +115,16 @@ async def async_setup_entry(
"detectionState",
)
# Clean up deprecated sensors
for sensor_desc in DEPRECATED_BINARY_SENSORS:
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{sensor_desc.key}"
if entity_id := entity_registry.async_get_entity_id(
BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
):
_LOGGER.debug("Removing deprecated entity %s", entity_id)
entity_registry.async_remove(entity_id)
known_devices: set[str] = set()
def _check_device() -> None:

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
"requirements": ["aioamazondevices==6.4.4"]
}

View File

@@ -32,7 +32,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
)
@@ -40,9 +42,9 @@ SENSORS: Final = (
AmazonSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement_fn=lambda device, _key: (
native_unit_of_measurement_fn=lambda device, key: (
UnitOfTemperature.CELSIUS
if device.sensors[_key].scale == "CELSIUS"
if key in device.sensors and device.sensors[key].scale == "CELSIUS"
else UnitOfTemperature.FAHRENHEIT
),
state_class=SensorStateClass.MEASUREMENT,

View File

@@ -18,7 +18,11 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call, async_update_unique_id
from .utils import (
alexa_api_call,
async_remove_dnd_from_virtual_group,
async_update_unique_id,
)
PARALLEL_UPDATES = 1
@@ -29,7 +33,9 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
)
method: str
@@ -58,6 +64,9 @@ async def async_setup_entry(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)
# Remove DND switch from virtual groups
await async_remove_dnd_from_virtual_group(hass, coordinator)
known_devices: set[str] = set()
def _check_device() -> None:

View File

@@ -4,8 +4,10 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate
from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
@@ -61,3 +63,21 @@ async def async_update_unique_id(
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
async def async_remove_dnd_from_virtual_group(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
) -> None:
"""Remove entity DND from virtual group."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-do_not_disturb"
entity_id = entity_registry.async_get_entity_id(
DOMAIN, SWITCH_DOMAIN, unique_id
)
is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
if entity_id and is_group:
entity_registry.async_remove(entity_id)
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)

View File

@@ -629,6 +629,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
devices_info.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,
@@ -637,7 +638,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
"entities": [],
}
)

View File

@@ -7,13 +7,13 @@ from collections import namedtuple
from collections.abc import Awaitable, Callable, Coroutine
import functools
import logging
from typing import Any, cast
from typing import Any
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
from aiohttp import ClientSession
from asusrouter import AsusRouter, AsusRouterError
from asusrouter.config import ARConfigKey
from asusrouter.modules.client import AsusClient
from asusrouter.modules.client import AsusClient, ConnectionState
from asusrouter.modules.data import AsusData
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
from asusrouter.tools.connection import get_cookie_jar
@@ -219,7 +219,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
@property
def is_connected(self) -> bool:
"""Get connected status."""
return cast(bool, self._api.is_connected)
return self._api.is_connected
async def async_connect(self) -> None:
"""Connect to the device."""
@@ -235,8 +235,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
async def async_disconnect(self) -> None:
"""Disconnect to the device."""
if self._api is not None and self._protocol == PROTOCOL_TELNET:
self._api.connection.disconnect()
await self._api.async_disconnect()
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
"""Get list of connected devices."""
@@ -437,6 +436,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
if dev.connection is not None
and dev.description is not None
and dev.connection.ip_address is not None
and dev.state is ConnectionState.CONNECTED
}
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:

View File

@@ -2,7 +2,9 @@
from __future__ import annotations
from typing import Any
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"]
"requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
}

View File

@@ -36,11 +36,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo
raise ConfigEntryAuthFailed("Migration to OAuth required")
session = async_create_august_clientsession(hass)
try:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
except ValueError as err:
raise ConfigEntryNotReady("OAuth implementation not available") from err
oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
try:

View File

@@ -26,6 +26,9 @@ async def async_setup_entry(
if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -33,11 +36,6 @@ async def async_setup_entry(
config_entry.runtime_data = coordinator
if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True

View File

@@ -17,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
from . import util
from .agent import BackupAgent
@@ -145,7 +144,7 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND)
else:
stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))
reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
worker_done_event = asyncio.Event()
@@ -153,7 +152,7 @@ class DownloadBackupView(HomeAssistantView):
"""Call by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set)
stream = AsyncIteratorWriter(hass.loop)
stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread(
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,7 +38,6 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import util as backup_util
from .agent import (
@@ -73,6 +72,7 @@ from .models import (
)
from .store import BackupStore
from .util import (
AsyncIteratorReader,
DecryptedBackupStreamer,
EncryptedBackupStreamer,
make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
else:
backup_stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
try:
await self.hass.async_add_executor_job(
validate_password_stream, reader, password

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -13,7 +14,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
import threading
from typing import IO, Any, cast
from typing import IO, Any, Self, cast
import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -22,11 +23,6 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.async_iterator import (
Abort,
AsyncIteratorReader,
AsyncIteratorWriter,
)
from homeassistant.util.json import JsonObjectType, json_loads_object
from .const import BUF_SIZE, LOGGER
@@ -63,6 +59,12 @@ class BackupEmpty(DecryptError):
_message = "No tar files found in the backup."
class AbortCipher(HomeAssistantError):
"""Abort the cipher operation."""
_message = "Abort cipher operation."
def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True)
@@ -164,6 +166,106 @@ def validate_password(path: Path, password: str | None) -> bool:
return False
class AsyncIteratorReader:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
def abort(self) -> None:
"""Abort the reader."""
self._aborted = True
if self._next_future is not None:
self._next_future.cancel()
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
)
if self._aborted:
self._next_future.cancel()
raise AbortCipher
try:
self._buffer = self._next_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)
def close(self) -> None:
"""Close the iterator."""
class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
return self
async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration
def abort(self) -> None:
"""Abort the writer."""
self._aborted = True
if self._write_future is not None:
self._write_future.cancel()
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
self._write_future = asyncio.run_coroutine_threadsafe(
self._queue.put(s), self._hass.loop
)
if self._aborted:
self._write_future.cancel()
raise AbortCipher
try:
self._write_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos += len(s)
return len(s)
def validate_password_stream(
input_stream: IO[bytes],
password: str | None,
@@ -240,7 +342,7 @@ def decrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except Abort:
except AbortCipher:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -328,7 +430,7 @@ def encrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except Abort:
except AbortCipher:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -455,8 +557,8 @@ class _CipherBackupStreamer:
self._hass.loop.call_soon_threadsafe(worker_status.done.set)
stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass.loop, stream)
writer = AsyncIteratorWriter(self._hass.loop)
reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass)
worker = threading.Thread(
target=self._cipher_func,
args=[

View File
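The hunk above moves AsyncIteratorReader/AsyncIteratorWriter into backup/util.py and has them take `hass` instead of a bare event loop; the underlying idea is bridging an async byte stream onto the blocking, file-like interface expected by code running in an executor thread (such as validate_password_stream). A minimal standalone sketch of that bridging pattern, with a hypothetical class name, plain asyncio instead of Home Assistant, and no buffering or abort handling:

```python
import asyncio
from collections.abc import AsyncIterator
from concurrent.futures import CancelledError


class SimpleAsyncIteratorReader:
    """Hypothetical, simplified bridge from an async byte iterator to blocking reads."""

    def __init__(self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]) -> None:
        self._loop = loop
        self._stream = stream

    async def _next(self) -> bytes:
        # anext() on an async generator is awaitable but not a coroutine,
        # so wrap it in a coroutine for run_coroutine_threadsafe.
        return await anext(self._stream, b"")

    def read(self) -> bytes:
        """Block in a worker thread until the next chunk arrives; b"" means EOF."""
        future = asyncio.run_coroutine_threadsafe(self._next(), self._loop)
        try:
            return future.result()
        except CancelledError:
            return b""


async def main() -> None:
    async def produce() -> AsyncIterator[bytes]:
        for chunk in (b"backup ", b"stream"):
            yield chunk

    reader = SimpleAsyncIteratorReader(asyncio.get_running_loop(), produce())

    def consume() -> None:
        # Runs in a worker thread, the same way the real reader is consumed.
        data = bytearray()
        while chunk := reader.read():
            data.extend(chunk)
        print(data.decode())

    await asyncio.to_thread(consume)


if __name__ == "__main__":
    asyncio.run(main())
```

Each blocking read() schedules one chunk fetch on the running loop and waits on the returned future, which is the same mechanism the real reader uses to feed synchronous tar/password validation code.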

@@ -73,12 +73,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
# Add the websocket and API client
entry.runtime_data = BangOlufsenData(websocket, client)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Start WebSocket connection once the platforms have been loaded.
# This ensures that the initial WebSocket notifications are dispatched to entities
# Start WebSocket connection
await client.connect_notifications(remote_control=True, reconnect=True)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True

View File

@@ -125,8 +125,7 @@ async def async_setup_entry(
async_add_entities(
new_entities=[
BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
],
update_before_add=True,
]
)
# Register actions.
@@ -267,8 +266,34 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._software_status.software_version,
)
# Get overall device state once. This is handled by WebSocket events the rest of the time.
product_state = await self._client.get_product_state()
# Get volume information.
if product_state.volume:
self._volume = product_state.volume
# Get all playback information.
# Ensure that the metadata is not None upon startup
if product_state.playback:
if product_state.playback.metadata:
self._playback_metadata = product_state.playback.metadata
self._remote_leader = product_state.playback.metadata.remote_leader
if product_state.playback.progress:
self._playback_progress = product_state.playback.progress
if product_state.playback.source:
self._source_change = product_state.playback.source
if product_state.playback.state:
self._playback_state = product_state.playback.state
# Set initial state
if self._playback_state.value:
self._state = self._playback_state.value
self._attr_media_position_updated_at = utcnow()
# Get the highest resolution available of the given images.
self._media_image = get_highest_resolution_artwork(self._playback_metadata)
# If the device has been updated with new sources, then the API will fail here.
await self._async_update_sources()

View File

@@ -3,12 +3,16 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -33,6 +37,8 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -65,12 +71,16 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false

View File

@@ -68,12 +68,17 @@ class IntegrationMatchHistory:
manufacturer_data: bool
service_data: set[str]
service_uuids: set[str]
name: str
def seen_all_fields(
previous_match: IntegrationMatchHistory, advertisement_data: AdvertisementData
previous_match: IntegrationMatchHistory,
advertisement_data: AdvertisementData,
name: str,
) -> bool:
"""Return if we have seen all fields."""
if previous_match.name != name:
return False
if not previous_match.manufacturer_data and advertisement_data.manufacturer_data:
return False
if advertisement_data.service_data and (
@@ -122,10 +127,11 @@ class IntegrationMatcher:
device = service_info.device
advertisement_data = service_info.advertisement
connectable = service_info.connectable
name = service_info.name
matched = self._matched_connectable if connectable else self._matched
matched_domains: set[str] = set()
if (previous_match := matched.get(device.address)) and seen_all_fields(
previous_match, advertisement_data
previous_match, advertisement_data, name
):
# We have seen all fields so we can skip the rest of the matchers
return matched_domains
@@ -140,11 +146,13 @@ class IntegrationMatcher:
)
previous_match.service_data |= set(advertisement_data.service_data)
previous_match.service_uuids |= set(advertisement_data.service_uuids)
previous_match.name = name
else:
matched[device.address] = IntegrationMatchHistory(
manufacturer_data=bool(advertisement_data.manufacturer_data),
service_data=set(advertisement_data.service_data),
service_uuids=set(advertisement_data.service_uuids),
name=name,
)
return matched_domains

View File

@@ -8,7 +8,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
"requirements": ["brother==5.1.0"],
"requirements": ["brother==5.1.1"],
"zeroconf": [
{
"type": "_printer._tcp.local.",

View File

@@ -315,7 +315,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component))
frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")
frontend.async_register_built_in_panel(
hass, "calendar", "calendar", "hass:calendar"
)
websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -51,6 +51,12 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval
@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"
# These constants are deprecated as of Home Assistant 2024.10
# Please use the CameraState enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity."""
@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK],
)
# These can be removed if no deprecated constants are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

View File
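The re-added block above restores the deprecated STATE_* constants behind a module-level __getattr__. A simplified, self-contained illustration of that pattern follows; the helper dictionary and warning text are hypothetical and stand in for the homeassistant.helpers.deprecation API rather than reproducing it:

```python
"""Hypothetical module showing the PEP 562 deprecation pattern used above."""

import warnings
from enum import Enum


class CameraState(Enum):
    RECORDING = "recording"
    STREAMING = "streaming"
    IDLE = "idle"


# name -> (replacement enum member, version in which the constant is removed)
_DEPRECATED_CONSTANTS = {
    "STATE_RECORDING": (CameraState.RECORDING, "2025.10"),
    "STATE_STREAMING": (CameraState.STREAMING, "2025.10"),
    "STATE_IDLE": (CameraState.IDLE, "2025.10"),
}


def __getattr__(name: str) -> object:
    """Resolve deprecated module constants lazily and warn about them."""
    if name not in _DEPRECATED_CONSTANTS:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    member, removal_version = _DEPRECATED_CONSTANTS[name]
    warnings.warn(
        f"{name} is deprecated and will be removed in {removal_version}; "
        f"use {member!r} instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return member.value
```

Accessing STATE_RECORDING on such a module still returns "recording" but emits a DeprecationWarning, which is what the check_if_deprecated_constant / dir_with_deprecated_constants helpers automate for the camera component.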

@@ -53,6 +53,7 @@ from .const import (
CONF_ACME_SERVER,
CONF_ALEXA,
CONF_ALIASES,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG,
CONF_FILTER,
@@ -129,6 +130,7 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,6 +78,7 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.2.0"],
"requirements": ["hass-nabucasa==1.1.1"],
"single_config_entry": true
}

View File

@@ -38,6 +38,10 @@ TYPE_SPECIFY_COUNTRY = "specify_country_code"
_LOGGER = logging.getLogger(__name__)
DESCRIPTION_PLACEHOLDER = {
"register_link": "https://electricitymaps.com/free-tier",
}
class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Co2signal."""
@@ -70,6 +74,7 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=data_schema,
description_placeholders=DESCRIPTION_PLACEHOLDER,
)
data = {CONF_API_KEY: user_input[CONF_API_KEY]}
@@ -179,4 +184,5 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN):
step_id=step_id,
data_schema=data_schema,
errors=errors,
description_placeholders=DESCRIPTION_PLACEHOLDER,
)

View File

@@ -1,106 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home` and reusing result.
Extract `async_setup_entry` into own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors.
config-flow:
status: todo
comment: |
The config flow is missing data descriptions.
Remove URLs from data descriptions; they should be replaced with placeholders.
Make use of Electricity Maps zone keys in country code as dropdown.
Make use of location selector for coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of `assert await async_setup_component(hass, DOMAIN, {})`
`test_sensor` could use `snapshot_platform`
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connects to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -6,7 +6,7 @@
"location": "[%key:common::config_flow::data::location%]",
"api_key": "[%key:common::config_flow::data::access_token%]"
},
"description": "Visit https://electricitymaps.com/free-tier to request a token."
"description": "Visit the [Electricity Maps page]({register_link}) to request a token."
},
"coordinates": {
"data": {

View File
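The co2signal change above (and the coinbase and ecobee ones further down) replaces hard-coded URLs in strings.json with description_placeholders supplied by the config flow. A minimal sketch of how the two sides pair up, for a hypothetical integration rather than the actual co2signal module:

```python
"""Hypothetical config flow whose form description links out via a placeholder."""

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY

DOMAIN = "example"
DESCRIPTION_PLACEHOLDERS = {"register_link": "https://example.com/get-a-token"}


class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for the hypothetical example integration."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_create_entry(title="Example", data=user_input)
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            # Keys here must match the {placeholders} used in strings.json, e.g.
            # "description": "Visit the [token page]({register_link}) to request a token."
            description_placeholders=DESCRIPTION_PLACEHOLDERS,
        )
```

Keeping the URL in the flow (or a constant) means translators only see the placeholder name, not the link itself.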

@@ -166,6 +166,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
"developer_url": "https://www.coinbase.com/developer-platform",
},
errors=errors,
)
@@ -195,6 +196,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
"developer_url": "https://www.coinbase.com/developer-platform",
},
errors=errors,
)

View File

@@ -11,7 +11,7 @@
},
"reauth_confirm": {
"title": "Update Coinbase API credentials",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit the [Developer Platform]({developer_url}) to create new credentials for {account_name}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from asyncio.exceptions import TimeoutError
from collections.abc import Mapping
import re
from typing import Any
from aiocomelit import (
@@ -27,25 +28,20 @@ from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111"
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
}
)
@@ -55,6 +51,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
api: ComelitCommonApi
if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
raise InvalidPin
session = await async_client_session(hass)
if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
api = ComeliteSerialBridgeApi(
@@ -105,6 +104,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -146,6 +147,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -189,6 +192,8 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -210,3 +215,7 @@ class CannotConnect(HomeAssistantError):
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class InvalidPin(HomeAssistantError):
"""Error to indicate an invalid pin."""

View File

@@ -161,7 +161,7 @@ class ComelitSerialBridge(
entry: ComelitConfigEntry,
host: str,
port: int,
pin: int,
pin: str,
session: ClientSession,
) -> None:
"""Initialize the scanner."""
@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
entry: ComelitConfigEntry,
host: str,
port: int,
pin: int,
pin: str,
session: ClientSession,
) -> None:
"""Initialize the scanner."""

View File

@@ -7,7 +7,14 @@ from typing import Any, cast
from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.components.cover import (
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
CoverDeviceClass,
CoverEntity,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
@@ -62,7 +69,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
super().__init__(coordinator, device, config_entry_entry_id)
# Device doesn't provide a status so we assume UNKNOWN at first startup
self._last_action: int | None = None
self._last_state: str | None = None
def _current_action(self, action: str) -> bool:
"""Return the current cover action."""
@@ -98,7 +104,6 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@bridge_api_call
async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state."""
self._last_state = self.state
await self.coordinator.api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state()
@@ -124,5 +129,10 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
await super().async_added_to_hass()
if last_state := await self.async_get_last_state():
self._last_state = last_state.state
if (state := await self.async_get_last_state()) is not None:
if state.state == STATE_CLOSED:
self._last_action = STATE_COVER.index(STATE_CLOSING)
if state.state == STATE_OPEN:
self._last_action = STATE_COVER.index(STATE_OPENING)
self._attr_is_closed = state.state == STATE_CLOSED

View File

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"requirements": ["aiocomelit==0.12.3"]
"requirements": ["aiocomelit==1.1.2"]
}

View File

@@ -43,11 +43,13 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the config component."""
frontend.async_register_built_in_panel(
hass, "config", "config", "mdi:cog", require_admin=True
hass, "config", "config", "hass:cog", require_admin=True
)
for panel in SECTIONS:

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Callable
from http import HTTPStatus
import logging
from typing import Any, NoReturn
from aiohttp import web
@@ -24,12 +23,7 @@ from homeassistant.helpers.data_entry_flow import (
FlowManagerResourceView,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.json import (
JSON_DUMP,
find_paths_unserializable_data,
json_bytes,
json_fragment,
)
from homeassistant.helpers.json import json_fragment
from homeassistant.loader import (
Integration,
IntegrationNotFound,
@@ -37,9 +31,6 @@ from homeassistant.loader import (
async_get_integrations,
async_get_loaded_integration,
)
from homeassistant.util.json import format_unserializable_data
_LOGGER = logging.getLogger(__name__)
@callback
@@ -411,40 +402,18 @@ def config_entries_flow_subscribe(
connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
async_on_flow_init_remove
)
try:
serialized_flows = [
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
connection.send_message(
websocket_api.event_message(
msg["id"],
[
{"type": None, "flow_id": flw["flow_id"], "flow": flw}
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
)
]
except (ValueError, TypeError):
# If we can't serialize, we'll filter out unserializable flows
serialized_flows = []
for flw in hass.config_entries.flow.async_progress():
if flw["context"]["source"] in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
):
continue
try:
serialized_flows.append(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
)
except (ValueError, TypeError):
_LOGGER.error(
"Unable to serialize to JSON. Bad data found at %s",
format_unserializable_data(
find_paths_unserializable_data(flw, dump=JSON_DUMP)
),
)
continue
connection.send_message(
websocket_api.messages.construct_event_message(
msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
],
)
)
connection.send_result(msg["id"])

View File

@@ -514,7 +514,7 @@ class ChatLog:
"""Set the LLM system prompt."""
llm_api: llm.APIInstance | None = None
if user_llm_hass_api is None:
if not user_llm_hass_api:
pass
elif isinstance(user_llm_hass_api, llm.API):
llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.16.0"],
"requirements": ["pydaikin==2.17.1"],
"zeroconf": ["_dkapi._tcp.local."]
}

View File

@@ -32,7 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SOURCE: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_handle_source_entity_changes(
@@ -47,9 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File
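The derivative hunk above drops the eager async_schedule_reload call and instead restores an options update listener; the same pattern reappears in the filter, generic_hygrostat, generic_thermostat and group hunks further down. A generic sketch of that pattern, with a hypothetical helper integration and the platform forwarding elided:

```python
"""Hypothetical helper integration: reload the entry whenever its options change."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Update listener, called when the config entry options are changed."""
    await hass.config_entries.async_reload(entry.entry_id)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the helper entry (platform forwarding omitted for brevity)."""
    # The unload hook removes the listener when the entry is unloaded or reloaded.
    entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
    return True
```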

@@ -140,7 +140,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1
MINOR_VERSION = 4

View File

@@ -6,13 +6,12 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.const import CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -56,40 +55,19 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format,
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

View File

@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
self._attr_translation_key = "button"
self._attr_translation_placeholders = {"key": str(key)}
def sync_callback(self, message: tuple) -> None:
def _sync(self, message: tuple) -> None:
"""Update the binary sensor state."""
if (
message[0] == self._remote_control_property.element_uid

View File

@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
)
self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: float
@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
self._device_instance.uid, self.subscriber
)
def sync_callback(self, message: tuple) -> None:
def _sync(self, message: tuple) -> None:
"""Update the state."""
if message[0] == self._attr_unique_id:
self._value = message[1]

View File

@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
"""
return f"{self._attr_unique_id}_{self._sensor_type}"
def sync_callback(self, message: tuple) -> None:
def _sync(self, message: tuple) -> None:
"""Update the consumption sensor state."""
if message[0] == self._attr_unique_id:
self._value = getattr(

View File

@@ -13,3 +13,8 @@ class Subscriber:
"""Initiate the subscriber."""
self.name = name
self.callback = callback
def update(self, message: str) -> None:
"""Trigger hass to update the device."""
_LOGGER.debug('%s got message "%s"', self.name, message)
self.callback(message)

View File

@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
"""Switch off the device."""
self._binary_switch_property.set(state=False)
def sync_callback(self, message: tuple) -> None:
def _sync(self, message: tuple) -> None:
"""Update the binary switch state and consumption."""
if message[0].startswith("devolo.BinarySwitch"):
self._attr_is_on = self._device_instance.binary_switch_property[

View File

@@ -56,16 +56,16 @@ async def async_setup_entry(
hostname = entry.data[CONF_HOSTNAME]
name = entry.data[CONF_NAME]
nameserver_ipv4 = entry.options[CONF_RESOLVER]
nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
resolver_ipv4 = entry.options[CONF_RESOLVER]
resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
port_ipv4 = entry.options[CONF_PORT]
port_ipv6 = entry.options[CONF_PORT_IPV6]
entities = []
if entry.data[CONF_IPV4]:
entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
if entry.data[CONF_IPV6]:
entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))
entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
async_add_entities(entities, update_before_add=True)
@@ -77,13 +77,11 @@ class WanIpSensor(SensorEntity):
_attr_translation_key = "dnsip"
_unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})
resolver: aiodns.DNSResolver
def __init__(
self,
name: str,
hostname: str,
nameserver: str,
resolver: str,
ipv6: bool,
port: int,
) -> None:
@@ -92,11 +90,11 @@ class WanIpSensor(SensorEntity):
self._attr_unique_id = f"{hostname}_{ipv6}"
self.hostname = hostname
self.port = port
self.nameserver = nameserver
self._resolver = resolver
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
self._retries = DEFAULT_RETRIES
self._attr_extra_state_attributes = {
"resolver": nameserver,
"resolver": resolver,
"querytype": self.querytype,
}
self._attr_device_info = DeviceInfo(
@@ -106,13 +104,13 @@ class WanIpSensor(SensorEntity):
model=aiodns.__version__,
name=name,
)
self.resolver: aiodns.DNSResolver
self.create_dns_resolver()
def create_dns_resolver(self) -> None:
"""Create the DNS resolver."""
self.resolver = aiodns.DNSResolver(
nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
)
self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port)
self.resolver.nameservers = [self._resolver]
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""

View File
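The dnsip change above renames the nameserver parameter to resolver and now assigns resolver.nameservers after constructing aiodns.DNSResolver with only the ports. A small standalone sketch of that resolver usage; the hostname and public resolver address are just examples, and the `.host` attribute access assumes A-record results as exposed by pycares:

```python
"""Minimal aiodns lookup mirroring the sensor's resolver setup above."""

import asyncio

import aiodns


async def lookup(hostname: str, resolver_ip: str = "1.1.1.1", port: int = 53) -> list[str]:
    """Resolve A records for hostname using an explicit DNS resolver."""
    resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
    resolver.nameservers = [resolver_ip]
    answers = await resolver.query(hostname, "A")
    return [answer.host for answer in answers]


if __name__ == "__main__":
    print(asyncio.run(lookup("home-assistant.io")))
```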

@@ -61,5 +61,8 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="authorize",
errors=errors,
description_placeholders={"pin": self._ecobee.pin},
description_placeholders={
"pin": self._ecobee.pin,
"auth_url": "https://www.ecobee.com/consumerportal/index.html",
},
)

View File

@@ -8,7 +8,7 @@
}
},
"authorize": {
"description": "Please authorize this app at https://www.ecobee.com/consumerportal/index.html with PIN code:\n\n{pin}\n\nThen, select **Submit**."
"description": "Please authorize this app at {auth_url} with PIN code:\n\n{pin}\n\nThen, select **Submit**."
}
},
"error": {

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]
}

View File

@@ -2,4 +2,3 @@ raw_get_positions:
target:
entity:
domain: vacuum
integration: ecovacs

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"requirements": ["env-canada==0.11.2"]
"requirements": ["env-canada==0.11.3"]
}

View File

@@ -10,6 +10,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Filter from a config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -17,3 +18,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Filter config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -246,7 +246,6 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -1,27 +0,0 @@
"""The Firefly III integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Set up Firefly III from a config entry."""
coordinator = FireflyDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,140 +0,0 @@
"""Config flow for the Firefly III integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_URL): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
vol.Required(CONF_API_KEY): str,
}
)
async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect."""
try:
client = Firefly(
api_url=data[CONF_URL],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await client.get_about()
except FireflyAuthenticationError:
raise InvalidAuth from None
except FireflyConnectionError as err:
raise CannotConnect from err
except FireflyTimeoutError as err:
raise FireflyClientTimeout from err
return True
class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Firefly III."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth when Firefly III API authentication fails."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth: ask for a new API key and validate."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
try:
await _validate_input(
self.hass,
data={
**reauth_entry.data,
CONF_API_KEY: user_input[CONF_API_KEY],
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class FireflyClientTimeout(HomeAssistantError):
"""Error to indicate a timeout occurred."""

View File

@@ -1,6 +0,0 @@
"""Constants for the Firefly III integration."""
DOMAIN = "firefly_iii"
MANUFACTURER = "Firefly III"
NAME = "Firefly III"

View File

@@ -1,137 +0,0 @@
"""Data Update Coordinator for Firefly III integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from aiohttp import CookieJar
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator]
DEFAULT_SCAN_INTERVAL = timedelta(minutes=5)
@dataclass
class FireflyCoordinatorData:
"""Data structure for Firefly III coordinator data."""
accounts: list[Account]
categories: list[Category]
category_details: list[Category]
budgets: list[Budget]
bills: list[Bill]
primary_currency: Currency
class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]):
"""Coordinator to manage data updates for Firefly III integration."""
config_entry: FireflyConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.firefly = Firefly(
api_url=self.config_entry.data[CONF_URL],
api_key=self.config_entry.data[CONF_API_KEY],
session=async_create_clientsession(
self.hass,
self.config_entry.data[CONF_VERIFY_SSL],
cookie_jar=CookieJar(unsafe=True),
),
)
async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
await self.firefly.get_about()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> FireflyCoordinatorData:
"""Fetch data from Firefly III API."""
now = datetime.now()
start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
end_date = now
try:
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets()
bills = await self.firefly.get_bills()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
return FireflyCoordinatorData(
accounts=accounts,
categories=categories,
category_details=category_details,
budgets=budgets,
bills=bills,
primary_currency=primary_currency,
)

View File

@@ -1,40 +0,0 @@
"""Base entity for Firefly III integration."""
from __future__ import annotations
from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import FireflyDataUpdateCoordinator
class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]):
"""Base class for Firefly III entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize a Firefly entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
configuration_url=URL(coordinator.config_entry.data[CONF_URL]),
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.entity_description.key}",
)
},
)

View File

@@ -1,18 +0,0 @@
{
"entity": {
"sensor": {
"account_type": {
"default": "mdi:bank",
"state": {
"expense": "mdi:cash-minus",
"revenue": "mdi:cash-plus",
"asset": "mdi:account-cash",
"liability": "mdi:hand-coin"
}
},
"category": {
"default": "mdi:label"
}
}
}
}

View File

@@ -1,10 +0,0 @@
{
"domain": "firefly_iii",
"name": "Firefly III",
"codeowners": ["@erwindouna"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/firefly_iii",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.6"]
}

View File

@@ -1,68 +0,0 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
No explicit parallel updates are defined.
reauthentication-flow:
status: todo
comment: |
No reauthentication flow is defined yet; it will be added in a future iteration.
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -1,133 +0,0 @@
"""Sensor platform for Firefly III integration."""
from __future__ import annotations
from pyfirefly.models import Account, Category
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.components.sensor.const import SensorDeviceClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
from .entity import FireflyBaseEntity
ACCOUNT_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="account_type",
translation_key="account",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
CATEGORY_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="category",
translation_key="category",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: FireflyConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Firefly III sensor platform."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
FireflyAccountEntity(
coordinator=coordinator,
entity_description=description,
account=account,
)
for account in coordinator.data.accounts
for description in ACCOUNT_SENSORS
]
entities.extend(
FireflyCategoryEntity(
coordinator=coordinator,
entity_description=description,
category=category,
)
for category in coordinator.data.category_details
for description in CATEGORY_SENSORS
)
async_add_entities(entities)
class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III account."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
account: Account,
) -> None:
"""Initialize Firefly account entity."""
super().__init__(coordinator, entity_description)
self._account = account
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{account.id}"
self._attr_name = account.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
# Account type state doesn't go well with the icons.json. Need to fix it.
if account.attributes.type == "expense":
self._attr_icon = "mdi:cash-minus"
elif account.attributes.type == "asset":
self._attr_icon = "mdi:account-cash"
elif account.attributes.type == "revenue":
self._attr_icon = "mdi:cash-plus"
elif account.attributes.type == "liability":
self._attr_icon = "mdi:hand-coin"
else:
self._attr_icon = "mdi:bank"
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self._account.attributes.current_balance
class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III category."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
category: Category,
) -> None:
"""Initialize Firefly category entity."""
super().__init__(coordinator, entity_description)
self._category = category
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{category.id}"
self._attr_name = category.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
spent_items = self._category.attributes.spent or []
earned_items = self._category.attributes.earned or []
spent = sum(float(item.sum) for item in spent_items if item.sum is not None)
earned = sum(float(item.sum) for item in earned_items if item.sum is not None)
if spent == 0 and earned == 0:
return None
return spent + earned

View File

@@ -1,49 +0,0 @@
{
"config": {
"step": {
"user": {
"data": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "[%key:common::config_flow::data::api_key%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "The API key for authenticating with Firefly",
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
},
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "The new API access token for authenticating with Firefly III"
},
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while trying to connect to the Firefly instance: {error}"
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
}
}
}

View File

@@ -452,10 +452,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.app.router.register_resource(IndexView(repo_path, hass))
async_register_built_in_panel(hass, "light")
async_register_built_in_panel(hass, "security")
async_register_built_in_panel(hass, "climate")
async_register_built_in_panel(hass, "profile")
async_register_built_in_panel(
@@ -463,7 +459,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"developer-tools",
require_admin=True,
sidebar_title="developer_tools",
sidebar_icon="mdi:hammer",
sidebar_icon="hass:hammer",
)
@callback

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251001.0"]
"requirements": ["home-assistant-frontend==20251001.4"]
}

View File

@@ -1,10 +1,8 @@
load_url:
fields:
device_id:
required: true
selector:
target:
device:
integration: fully_kiosk
fields:
url:
example: "https://home-assistant.io"
required: true
@@ -12,12 +10,10 @@ load_url:
text:
set_config:
fields:
device_id:
required: true
selector:
target:
device:
integration: fully_kiosk
fields:
key:
example: "motionSensitivity"
required: true
@@ -30,14 +26,12 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk

View File

@@ -147,10 +147,6 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -161,10 +157,6 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -182,10 +174,6 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -108,7 +108,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidifier,
@@ -141,7 +140,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -150,6 +148,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -187,6 +186,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -96,7 +96,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -35,7 +35,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HEATER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the heater, but
@@ -68,7 +67,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -77,6 +75,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -114,6 +113,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -104,7 +104,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -76,10 +76,6 @@ async def async_unload_entry(
hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
) -> bool:
"""Unload a config entry."""
if not hass.config_entries.async_loaded_entries(DOMAIN):
for service_name in hass.services.async_services_for_domain(DOMAIN):
hass.services.async_remove(DOMAIN, service_name)
conversation.async_unset_agent(hass, entry)
return True

View File

@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.event import async_call_later
@@ -68,7 +68,13 @@ async def async_send_text_commands(
) -> list[CommandResponse]:
"""Send text commands to Google Assistant Service."""
# There can only be 1 entry (config_flow has single_instance_allowed)
entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]
entries = hass.config_entries.async_loaded_entries(DOMAIN)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
)
entry: GoogleAssistantSDKConfigEntry = entries[0]
session = entry.runtime_data.session
try:
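
This hunk replaces the bare [0] lookup with a guard that raises a translatable ServiceValidationError when no Google Assistant SDK entry is currently loaded; the matching entry_not_loaded message is added to strings.json two hunks below. A trimmed sketch of the guard, with the helper's other parameters and the actual command sending omitted:

# Sketch: refuse the service call cleanly instead of raising IndexError.
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError

DOMAIN = "google_assistant_sdk"


async def async_send_text_commands(hass: HomeAssistant, commands: list[str]) -> None:
    """Send text commands, failing with a user-facing error if nothing is loaded."""
    entries = hass.config_entries.async_loaded_entries(DOMAIN)
    if not entries:
        # Rendered from the "entry_not_loaded" key under "exceptions" in strings.json.
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="entry_not_loaded",
        )
    entry = entries[0]
    session = entry.runtime_data.session
    # ...refresh the OAuth token via `session` and send `commands` to the API...

ServiceValidationError is surfaced to the caller as a validation failure of the service call rather than as an unhandled exception in the logs.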

View File

@@ -1,4 +1,4 @@
"""Support for Google Assistant SDK."""
"""Services for the Google Assistant SDK integration."""
from __future__ import annotations

View File

@@ -65,6 +65,9 @@
}
},
"exceptions": {
"entry_not_loaded": {
"message": "Entry not loaded"
},
"grpc_error": {
"message": "Failed to communicate with Google Assistant"
}

View File

@@ -456,6 +456,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
"""Initialize the agent."""
self.entry = entry
self.subentry = subentry
self.default_model = default_model
self._attr_name = subentry.title
self._genai_client = entry.runtime_data
self._attr_unique_id = subentry.subentry_id
@@ -489,7 +490,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
tools = tools or []
tools.append(Tool(google_search=GoogleSearch()))
model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_name = options.get(CONF_CHAT_MODEL, self.default_model)
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
supports_system_instruction = (
"gemma" not in model_name
@@ -620,7 +621,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
def create_generate_content_config(self) -> GenerateContentConfig:
"""Create the GenerateContentConfig for the LLM."""
options = self.subentry.data
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model = options.get(CONF_CHAT_MODEL, self.default_model)
thinking_config: ThinkingConfig | None = None
if model.startswith("models/gemini-2.5") and not model.endswith(
("tts", "image", "image-preview")
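
This hunk threads a default_model argument through GoogleGenerativeAILLMBaseEntity so each entity type can fall back to its own model rather than the single RECOMMENDED_CHAT_MODEL constant. A stripped-down sketch of that fallback; the class shape and the model strings are illustrative only, not the integration's actual definitions:

# Sketch: per-entity-type default model, overridable by the subentry options.
CONF_CHAT_MODEL = "chat_model"


class SketchLLMEntity:
    """Minimal stand-in for the base entity's model selection."""

    def __init__(self, options: dict[str, str], default_model: str) -> None:
        self.options = options
        self.default_model = default_model  # supplied per entity type

    @property
    def model_name(self) -> str:
        # A model picked in the options always wins; otherwise fall back to
        # this entity type's own default instead of a shared constant.
        return self.options.get(CONF_CHAT_MODEL, self.default_model)


conversation = SketchLLMEntity({}, default_model="models/gemini-2.5-flash")
custom = SketchLLMEntity({CONF_CHAT_MODEL: "models/gemini-2.5-pro"}, default_model="models/gemini-2.5-flash")
print(conversation.model_name)  # models/gemini-2.5-flash
print(custom.model_name)        # models/gemini-2.5-pro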

View File

@@ -1,5 +1,7 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -141,9 +141,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await hass.config_entries.async_forward_entry_setups(
entry, (entry.options["group_type"],)
)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -329,7 +329,6 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
@callback
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:

View File

@@ -1,18 +1,14 @@
"""The Growatt server PV inverter sensor integration."""
from collections.abc import Mapping
import logging
import growattServer
from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.exceptions import ConfigEntryError
from .const import (
AUTH_API_TOKEN,
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_PLANT_ID,
DEFAULT_URL,
@@ -23,110 +19,36 @@ from .const import (
from .coordinator import GrowattConfigEntry, GrowattCoordinator
from .models import GrowattRuntimeData
_LOGGER = logging.getLogger(__name__)
def get_device_list_classic(
def get_device_list(
api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Retrieve the device list for the selected plant."""
plant_id = config[CONF_PLANT_ID]
# Log in to api and fetch first plant if no plant id is defined.
try:
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
# DEBUG: Log the actual response structure
except Exception as ex:
_LOGGER.error("DEBUG - Login response: %s", login_response)
raise ConfigEntryError(
f"Error communicating with Growatt API during login: {ex}"
) from ex
if not login_response.get("success"):
msg = login_response.get("msg", "Unknown error")
_LOGGER.debug("Growatt login failed: %s", msg)
if msg == LOGIN_INVALID_AUTH_CODE:
raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
raise ConfigEntryError(f"Growatt login failed: {msg}")
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
raise ConfigEntryError("Username, Password or URL may be incorrect!")
user_id = login_response["user"]["id"]
if plant_id == DEFAULT_PLANT_ID:
try:
plant_info = api.plant_list(user_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during plant list: {ex}"
) from ex
if not plant_info or "data" not in plant_info or not plant_info["data"]:
raise ConfigEntryError("No plants found for this account.")
plant_id = plant_info["data"][0]["plantId"]
# Get a list of devices for specified plant to add sensors for.
try:
devices = api.device_list(plant_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during device list: {ex}"
) from ex
return devices, plant_id
def get_device_list_v1(
api, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Device list logic for Open API V1.
Note: Plant selection (including auto-selection if only one plant exists)
is handled in the config flow before this function is called. This function
only fetches devices for the already-selected plant_id.
"""
plant_id = config[CONF_PLANT_ID]
try:
devices_dict = api.device_list(plant_id)
except growattServer.GrowattV1ApiError as e:
raise ConfigEntryError(
f"API error during device list: {e} (Code: {getattr(e, 'error_code', None)}, Message: {getattr(e, 'error_msg', None)})"
) from e
devices = devices_dict.get("devices", [])
# Only MIN device (type = 7) support implemented in current V1 API
supported_devices = [
{
"deviceSn": device.get("device_sn", ""),
"deviceType": "min",
}
for device in devices
if device.get("type") == 7
]
for device in devices:
if device.get("type") != 7:
_LOGGER.warning(
"Device %s with type %s not supported in Open API V1, skipping",
device.get("device_sn", ""),
device.get("type"),
)
return supported_devices, plant_id
def get_device_list(
api, config: Mapping[str, str], api_version: str
) -> tuple[list[dict[str, str]], str]:
"""Dispatch to correct device list logic based on API version."""
if api_version == "v1":
return get_device_list_v1(api, config)
if api_version == "classic":
return get_device_list_classic(api, config)
raise ConfigEntryError(f"Unknown API version: {api_version}")
async def async_setup_entry(
hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
"""Set up Growatt from a config entry."""
config = config_entry.data
username = config[CONF_USERNAME]
url = config.get(CONF_URL, DEFAULT_URL)
# If the URL has been deprecated then change to the default instead
@@ -136,24 +58,11 @@ async def async_setup_entry(
new_data[CONF_URL] = url
hass.config_entries.async_update_entry(config_entry, data=new_data)
# Determine API version
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
api_version = "v1"
token = config[CONF_TOKEN]
api = growattServer.OpenApiV1(token=token)
elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
api_version = "classic"
username = config[CONF_USERNAME]
api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=username
)
# Initialise the library with the username & a random id each time it is started
api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
api.server_url = url
else:
raise ConfigEntryError("Unknown authentication type in config entry.")
devices, plant_id = await hass.async_add_executor_job(
get_device_list, api, config, api_version
)
devices, plant_id = await hass.async_add_executor_job(get_device_list, api, config)
# Create a coordinator for the total sensors
total_coordinator = GrowattCoordinator(
@@ -166,7 +75,7 @@ async def async_setup_entry(
hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
)
for device in devices
if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
if device["deviceType"] in ["inverter", "tlx", "storage", "mix"]
}
# Perform the first refresh for the total coordinator
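
growattServer is a synchronous library, so the setup path shown here keeps the login and device lookup off the event loop by running them in an executor thread. A compressed sketch of that hand-off; the plain-string config keys stand in for the CONF_* constants the integration actually uses, and the error handling is simplified:

# Sketch: blocking vendor-API calls dispatched from async setup via the executor.
from collections.abc import Mapping

import growattServer

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError


def get_device_list(
    api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
    """Blocking login and device lookup; must not run on the event loop."""
    login_response = api.login(config["username"], config["password"])
    if not login_response["success"]:
        raise ConfigEntryError("Username, Password or URL may be incorrect!")
    plant_id = config["plant_id"]
    return api.device_list(plant_id), plant_id


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Hand the blocking library calls to an executor thread."""
    api = growattServer.GrowattApi(
        add_random_user_id=True, agent_identifier=config_entry.data["username"]
    )
    devices, plant_id = await hass.async_add_executor_job(
        get_device_list, api, config_entry.data
    )
    # ...create coordinators for the returned devices and forward platforms...
    return True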

Some files were not shown because too many files have changed in this diff.