Compare commits

...

227 Commits

Author SHA1 Message Date
jbouwh
98e0513866 Add MQTT select subentry support 2025-10-03 21:08:15 +00:00
jbouwh
972e643d88 Add MQTT number subentry support 2025-10-02 06:01:18 +00:00
starkillerOG
b0a08782e0 Add Roborock mop intensity translations (#153380) 2025-10-01 22:51:26 +02:00
G Johansson
6c9955f220 Remove deprecated constants in camera (#153363) 2025-10-01 22:20:34 +02:00
G Johansson
f56b94c0f9 Remove deprecated constants from media_player (#153366) 2025-10-01 22:20:07 +02:00
G Johansson
3cf035820b Remove deprecated state constants from lock (#153367) 2025-10-01 22:16:52 +02:00
Erik Montnemery
99a796d066 Remove legacy history queries from recorder (#153324) 2025-10-01 22:06:56 +02:00
Erik Montnemery
1cd1b1aba8 Remove to_native method from recorder database schemas (#153334) 2025-10-01 21:25:05 +02:00
Ståle Storø Hauknes
4131c14629 Add parallel updates to airthings_ble (#153315) 2025-10-01 20:14:23 +02:00
Tom
c2acda5796 Bump airOS module for alternative login url (#153317) 2025-10-01 20:11:35 +02:00
Marc Mueller
4806e7e9d9 Update cryptography to 46.0.2 (#153327) 2025-10-01 19:52:57 +02:00
Marc Mueller
76606fd44f Update types packages (#153330) 2025-10-01 19:51:37 +02:00
Andre Lengwenus
2983f1a3b6 Explicitly check for None in raw value processing of modbus (#153352) 2025-10-01 19:48:35 +02:00
Michael
8019779b3a Set config entry to None in ProxmoxVE (#153357) 2025-10-01 19:45:34 +02:00
Marc Mueller
62cdcbf422 Misc typing improvements (#153322) 2025-10-01 19:30:41 +02:00
Marc Mueller
b12a5a36e1 Update bcrpyt to 5.0.0 (#153325) 2025-10-01 20:07:45 +03:00
epenet
e32763e464 Add water heater fixture for Tuya tests (#153336) 2025-10-01 20:02:54 +03:00
Stefan Agner
b85cf3f9d2 Bump aiohasupervisor to 0.3.3 (#153344) 2025-10-01 20:01:53 +03:00
puddly
3777bcc2af Do not reset the adapter twice during ZHA options flow migration (#153345) 2025-10-01 18:22:41 +02:00
Maciej Bieniek
52cde48ff0 Add missing test for Shelly config flow (#153346) 2025-10-01 18:32:57 +03:00
Marc Mueller
bf1da35303 Update pyOpenSSL to 25.3.0 (#153329) 2025-10-01 17:32:08 +02:00
Erwin Douna
c1bf11da34 Bump pyportainer 1.0.2 (#153326) 2025-10-01 17:07:21 +02:00
Erwin Douna
3c20325b37 Bump pyfirefly 0.1.6 (#153335) 2025-10-01 17:06:31 +02:00
Maciej Bieniek
fd8ccb8d8f Improve mac_address_from_name() function to avoid double discovery of Shelly devices (#153343) 2025-10-01 16:49:27 +02:00
Michael Hansen
d76e947021 Bump intents to 2025.10.1 (#153340) 2025-10-01 09:39:08 -05:00
Erik Montnemery
c91ed96543 Use pytest.mark.usefixtures in history tests (#153306) 2025-10-01 15:53:55 +02:00
HarvsG
b164531ba8 Bayesian - add config entry tests (#153316) 2025-10-01 15:46:16 +02:00
Erik Montnemery
7c623a8704 Use pytest.mark.usefixtures in some recorder tests (#153313) 2025-10-01 15:38:51 +02:00
Maciej Bieniek
7ae3340336 Add test for full device snapshot for Shelly Wall Display XL (#153305) 2025-10-01 16:00:15 +03:00
Marc Mueller
653b73c601 Fix device_automation RuntimeWarning in tests (#153319) 2025-10-01 14:26:09 +02:00
Artur Pragacz
7c93d91bae Filter out service type devices in extended analytics (#153271) 2025-10-01 12:38:50 +02:00
Abílio Costa
07da0cfb2b Stop writing to config dir log file on supervised install (#146675)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-10-01 11:11:00 +01:00
Artur Pragacz
b411a11c2c Add analytics platform to esphome (#153311) 2025-10-01 12:08:50 +02:00
epenet
0555b84d05 Add new cover fixture for Tuya (#153310) 2025-10-01 12:01:37 +02:00
TheJulianJES
790bddef63 Improve ZHA multi-pan firmware repair text (#153232) 2025-10-01 11:50:01 +02:00
TheJulianJES
a3089b8aa7 Replace remaining ZHA "radio" strings with "adapter" (#153234) 2025-10-01 11:46:08 +02:00
puddly
77c8426d63 Use hardware bootloader reset methods for firmware config flows (#153277) 2025-10-01 11:43:28 +02:00
TheJulianJES
faf226f6c2 Fix ZHA unable to select "none" flow control (#153235) 2025-10-01 11:42:50 +02:00
HarvsG
06d143b81a Fix Bayesian ConfigFlow templates in 2025.10 (#153289)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-10-01 11:39:23 +02:00
Erik Montnemery
08b6a0a702 Add device class filter to switcher_kis services (#153248) 2025-10-01 12:27:17 +03:00
Bram Kragten
a20d1e3656 Update frontend to 20251001.0 (#153300) 2025-10-01 09:50:30 +02:00
Erwin Douna
36cc3682ca Add Firefly III integration (#147062)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-30 23:34:33 +02:00
Aviad Levy
1b495ecafa Add support for errored torrents in qBittorrent sensor (#153120)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 23:34:15 +02:00
puddly
7d1a0be07e Reduce Connect firmware install times by removing unnecessary firmware probing (#153012) 2025-09-30 22:41:51 +02:00
Geoffrey
327f65c991 Add switch domain to VegeHub integration (#148436)
Co-authored-by: GhoweVege <85890024+GhoweVege@users.noreply.github.com>
2025-09-30 22:38:05 +02:00
Manu
4ac89f6849 Add notify platform to Habitica (#150553) 2025-09-30 22:35:55 +02:00
Nojus
db3b070ed0 Add meteo_lt integration (#152948)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 22:17:36 +02:00
anishsane
6d940f476a Add support for Media player Mute/Unmute intents (#150508) 2025-09-30 14:37:19 -05:00
Erwin Douna
1ca701dda4 Portainer fix CONF_VERIFY_SSL (#153269)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-09-30 21:36:04 +02:00
Joost Lekkerkerker
291c44100c Add Eltako brand (#153276) 2025-09-30 21:29:58 +02:00
Joost Lekkerkerker
c8d676e06b Add Konnected brand (#153280) 2025-09-30 21:27:43 +02:00
Joost Lekkerkerker
4c1ae0eddc Add Level brand (#153279) 2025-09-30 21:21:21 +02:00
Norbert Rittel
39eadc814f Replace "Climate name" with "Climate program" in ecobee action (#153264) 2025-09-30 21:16:37 +02:00
Robert Resch
f7ecad61ba Bump aioecowitt to 2025.9.2 (#153273) 2025-09-30 20:58:34 +02:00
Norbert Rittel
fa4cb54549 Fix sentence-casing in two title strings of roomba (#153281) 2025-09-30 20:51:44 +02:00
Manu
2be33c5e0a Update quality scale of ntfy integration to platinum 🏆️ (#151785) 2025-09-30 20:36:18 +02:00
LG-ThinQ-Integration
904d7e5d5a Add air/water filter state in percent to LG ThinQ (#152150)
Co-authored-by: yunseon.park <yunseon.park@lge.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 20:26:47 +02:00
Pete Sage
dbc4a65d48 Fix Sonos Dialog Select type conversion part II (#152491)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 20:25:19 +02:00
Pete Sage
b93f4aabf1 Add tests for Sonos media metadata (#152622) 2025-09-30 20:24:57 +02:00
Joost Lekkerkerker
9eaa40c7a4 Require cloud for Aladdin Connect (#153278)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-30 19:57:24 +02:00
Lucas Mindêllo de Andrade
b308a882fb Add Roomba J9 compatibility to the roomba integration (#145913)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 19:10:22 +02:00
Erik Montnemery
7f63ba2087 Improve saved state of RestoreSensor when using freezegun (#152740) 2025-09-30 18:27:56 +02:00
Erik Montnemery
d7269cfcc6 Use pytest_unordered in additional service helper tests (#153255) 2025-09-30 18:26:32 +02:00
starkillerOG
2850a574f6 Add Reolink floodlight event entities (#152564) 2025-09-30 17:59:12 +02:00
Samuel Xiao
dcb8d4f702 Add support model [relay switch 2pm] for switchbot cloud (#148381) 2025-09-30 17:49:32 +02:00
Samuel Xiao
aeadc0c4b0 Add lock support to Switchbot Cloud (#148310) 2025-09-30 17:48:38 +02:00
Nathan Spencer
683c6b17be Add release url to Litter-Robot 4 update entity (#152504) 2025-09-30 17:47:27 +02:00
Samuel Xiao
69dd5c91b7 Switchbot Cloud: Fix Roller Shade not work issue (#152528) 2025-09-30 17:05:23 +02:00
HarvsG
5cf7dfca8f Pihole better logging of update errors (#152077) 2025-09-30 16:59:03 +02:00
Marc Mueller
62a49d4244 Update pandas to 2.3.3 (#153251) 2025-09-30 16:58:41 +02:00
falconindy
93ee6322f2 snoo: add button entity for calling start_snoo (#151052)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 16:57:58 +02:00
Artur Pragacz
914990b58a Add analytics platform to wled (#153258) 2025-09-30 10:39:32 -04:00
Joakim Sørensen
f78bb5adb6 Bump hass-nabucasa from 1.1.2 to 1.2.0 (#153250) 2025-09-30 15:29:04 +02:00
Erik Montnemery
905f5e7289 Add device class filter to entity services (#153247) 2025-09-30 14:28:04 +01:00
Erik Montnemery
ec503618c3 Handle errors in WS manifest/list (#153256) 2025-09-30 15:12:41 +02:00
Erik Montnemery
7a41cbc314 Skip unserializable flows in WS config_entries/flow/subscribe (#153259) 2025-09-30 15:12:19 +02:00
Erik Montnemery
c58ba734e7 Correct target filter in osoenergy services (#153244) 2025-09-30 14:06:14 +02:00
Erik Montnemery
68f63be62f Correct target filter in litterrobot services (#153243) 2025-09-30 14:05:46 +02:00
Erik Montnemery
2aa4ca1351 Correct homekit service definition (#153242) 2025-09-30 14:04:09 +02:00
Imeon-Energy
fbabb27787 Add forecast energy sensor to Imeon inverter integration (#152176)
Co-authored-by: TheBushBoy <theodavid@icloud.com>
2025-09-30 13:35:18 +02:00
Markus Jacobsen
0960d78eb5 Use initial received WebSocket state in Bang & Olufsen (#152432) 2025-09-30 13:34:43 +02:00
andreimoraru
474b40511f Bump yt-dlp to 2025.09.26 (#153252) 2025-09-30 13:19:06 +02:00
Jan-Philipp Benecke
18b80aced3 Record current quality scale of Electricity Maps (#149241) 2025-09-30 11:38:16 +02:00
dependabot[bot]
b964d362b7 Bump docker/login-action from 3.5.0 to 3.6.0 (#153239)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-30 11:14:17 +02:00
G Johansson
3914e41f3c Rename resolver to nameserver in dnsip (#153223) 2025-09-30 10:46:59 +02:00
Erik Montnemery
82bdfcb99b Correct target filter in ecovacs services (#153241) 2025-09-30 10:39:18 +03:00
Marc Mueller
976cea600f Use attribute names for match class (#153191) 2025-09-29 23:12:54 +02:00
Tom
8c8713c3f7 Rework test split for airOS reauthentication flow (#153221) 2025-09-29 22:07:18 +02:00
G Johansson
2359ae6ce7 Bump pysmhi to 1.1.0 (#153222) 2025-09-29 22:04:59 +02:00
Paul Bottein
b570fd35c8 Replace legacy hass icons to mdi icons (#153204) 2025-09-29 20:04:21 +01:00
starkillerOG
9d94e6b3b4 Add Reolink bicycle sensitivity and delay (#153217) 2025-09-29 20:44:13 +02:00
Martin Hjelmare
cfab789823 Add hardware Zigbee flow strategy (#153190) 2025-09-29 20:08:43 +02:00
Erik Montnemery
81917425dc Add test which fails on duplicated statistics units (#153202)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
Co-authored-by: jbouwh <jan@jbsoft.nl>
2025-09-29 20:07:59 +02:00
Jan Bouwhuis
bfb62709d4 Add missing translation strings for added sensor device classes pm4 and reactive energy (#153215) 2025-09-29 19:55:09 +02:00
Joost Lekkerkerker
ca3f2ee782 Mark Konnected as Legacy (#153193) 2025-09-29 18:22:29 +01:00
Ludovic BOUÉ
fc8703a40f Matter DoorLock attributes (#151418)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-09-29 18:20:22 +01:00
c0ffeeca7
80517c7ac1 ZHA: rename radio to adapter (#153206) 2025-09-29 18:17:44 +01:00
Erik Montnemery
2b4b46eaf8 Add async_iterator util (#153194) 2025-09-29 18:54:23 +02:00
Martin Hjelmare
40b9dae608 Improve hardware flow strings (#153034) 2025-09-29 18:29:58 +02:00
Erik Montnemery
5975cd6e09 Revert "Add mg/m³ as a valid UOM for sensor/number Carbon Monoxide device class" (#153196) 2025-09-29 15:43:13 +01:00
RogerSelwyn
258c9ff52b Handle return result from ebusd being "empty" (#153199) 2025-09-29 16:08:42 +02:00
starkillerOG
89c5d498a4 Add Reolink Ai person type, vehicle type and animal type (#153170) 2025-09-29 15:39:29 +02:00
Artur Pragacz
76cb4d123a Filter out empty integration type in extended analytics (#153188) 2025-09-29 15:18:15 +02:00
Erik Montnemery
f0c29c7699 Revert "Add comment on conversion factor for Carbon monoxide on dependency molecular weight" (#153195) 2025-09-29 14:56:42 +02:00
Kyle Worrall
aa4151ced7 Fix for Hue Integration motion aware areas (#153079)
Co-authored-by: Marcel van der Veldt <m.vanderveldt@outlook.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 14:50:36 +02:00
G Johansson
0a6fa978fa Add timeout to dnsip (to handle stale connections) (#153086) 2025-09-29 14:49:38 +02:00
Simone Chemelli
dc02002b9d Bump aioamazondevices to 6.2.7 (#153185) 2025-09-29 14:30:42 +02:00
cdnninja
f071a3f38b Correct vesync water tank lifted key (#153173) 2025-09-29 14:29:25 +02:00
dependabot[bot]
b935231e47 Bump actions/dependency-review-action from 4.7.3 to 4.8.0 (#153180)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:17:20 +02:00
dependabot[bot]
b9f7613567 Bump github/codeql-action from 3.30.4 to 3.30.5 (#153179)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:15:53 +02:00
Maciej Bieniek
1289a031ab Add consumed energy sensor for Shelly pm1 and switch components (#153053) 2025-09-29 13:06:07 +03:00
Andrew Jackson
289546ef6d Bump aiomealie to 0.11.0 adding times to recipes (#153183) 2025-09-29 11:58:40 +02:00
Guido Schmitz
aacff4db5d Rework devolo Home Control config flow tests (#147083)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 09:47:07 +02:00
starkillerOG
f833b56122 Add Reolink siren state (#153169) 2025-09-29 08:42:38 +02:00
Tom Matheussen
7eb0f2993f Fix entities not being created when adding subentries for Satel Integra (#153139) 2025-09-28 21:37:35 -04:00
Michael
abb341abfe Add newly added cpu temperatures to diagnostics in FRITZ!Tools (#153168) 2025-09-28 22:40:10 +02:00
starkillerOG
0d90614369 Bump reolink-aio to 0.16.0 (#153161) 2025-09-28 21:55:39 +02:00
starkillerOG
ec84bebeea Add Reolink AI bicycle detection entity (#153163) 2025-09-28 21:54:59 +02:00
Shay Levy
9176867d6b Add Shelly EV charger sensors (#152722) 2025-09-28 22:45:11 +03:00
Allen Porter
281a137ff5 Add missing translations for Model Context Protocol integration (#153147) 2025-09-28 20:05:15 +02:00
tronikos
d6543480ac Refactor SQL integration (#153135) 2025-09-28 19:03:13 +02:00
Luca Graf
ae6391b866 Ignore gateway device in ViCare integration (#153097) 2025-09-28 16:04:22 +02:00
Joakim Plate
10b56e4258 Ensure togrill detects disconnected devices (#153067) 2025-09-28 15:34:56 +02:00
Erwin Douna
0ff2597957 Portainer add re-auth flow (#153077) 2025-09-28 15:31:50 +02:00
Artur Pragacz
026b28e962 Improve interview logging in Onkyo (#153095) 2025-09-28 15:26:40 +02:00
peteS-UK
9a1e67294a Extend timeout test in test_config_flow for Squeezebox to completion (#153080) 2025-09-28 15:20:47 +02:00
G Johansson
cdb448a5cc Use automatic reload options flow in random (#153103) 2025-09-28 01:02:33 +01:00
G Johansson
ab80e726e2 Use automatic reload options flow in filter (#153104) 2025-09-28 01:02:14 +01:00
G Johansson
2d5d0f67b2 Use automatic reload options flow in history_stats (#153115) 2025-09-28 01:01:33 +01:00
G Johansson
d4100b6096 Use automatic reload options flow in mold_indicator (#153106) 2025-09-28 01:00:48 +01:00
G Johansson
955e854d77 Use automatic reload options flow in utility_meter (#153111) 2025-09-28 01:00:07 +01:00
G Johansson
0c37f88c49 Use automatic reload options flow in derivative (#153112) 2025-09-28 00:59:07 +01:00
G Johansson
48167eeb9c Use automatic reload options flow in worldclock (#153105) 2025-09-28 00:58:20 +01:00
G Johansson
24177197f7 Use automatic reload options flow in generic_thermostat (#153108) 2025-09-28 00:57:12 +01:00
G Johansson
863fc0ba97 Use automatic reload options flow in switch_as_x (#153109) 2025-09-28 00:52:26 +01:00
G Johansson
9f7b229d02 Use automatic reload options flow in template (#153110) 2025-09-28 00:50:00 +01:00
G Johansson
ffd909f3d9 Use automatic reload options flow in group (#153116) 2025-09-28 00:48:44 +01:00
Tom
1ebf096a33 Add reauthentication flow to airOS (#153076)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-09-27 23:28:14 +02:00
Robert Resch
96d51965e5 Bump deebot-client to 15.0.0 (#153125) 2025-09-27 23:24:39 +02:00
G Johansson
04b510b020 Fix event range in workday calendar (#153128) 2025-09-27 23:22:39 +02:00
G Johansson
c9a301d50e Use automatic reload options flow in systemmonitor (#153107) 2025-09-27 20:50:14 +02:00
G Johansson
b304bd1a8b Use automatic reload options flow in local_file (#153114) 2025-09-27 20:49:39 +02:00
G Johansson
b99525b231 Use automatic reload options flow in tod (#153113) 2025-09-27 20:45:40 +02:00
G Johansson
634db13990 Use automatic reload options flow in trend (#153117) 2025-09-27 20:44:53 +02:00
peteS-UK
ad51a77989 Extend squeezebox config_flow test to completion (#153000)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-09-27 20:36:38 +02:00
G Johansson
3348a39e8a Use automatic reload options flow in generic_hygrostat (#153102) 2025-09-27 20:33:57 +02:00
Christian McHugh
81c2e356ec Fix: Set EPH climate heating as on only when boiler is actively heating (#152914) 2025-09-27 20:19:57 +02:00
Jan Bouwhuis
de6c3512d2 Add IMAP fetch message part feature (#152845) 2025-09-27 14:49:26 +02:00
G Johansson
36dc1e938a Fix can exclude optional holidays in workday (#153082) 2025-09-27 14:40:29 +02:00
Sören Beye
07a78cf6f7 Squeezebox: Proxy all the thumbnails (#147199)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-27 14:39:15 +02:00
Erwin Douna
eaa673e0c3 Portainer switch terminology to API token (#152958)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-27 14:32:25 +02:00
Simone Chemelli
f2c4ca081f Remove redundant code for Alexa Devices (#153083) 2025-09-27 13:05:07 +02:00
Thomas D
e3d707f0b4 Prevent duplicate entities for Volvo integration (#151779) 2025-09-27 12:29:11 +02:00
Tom
fb93fed2e5 Bump airOS dependency (#153065) 2025-09-27 01:20:51 +02:00
Björn Dalfors
95dfc2f23d Bump nibe dependency to 2.19.0 (#153062) 2025-09-26 23:49:40 +01:00
Franck Nijhof
408df2093a Update Home Assistant base image to 2025.09.3 (#153064) 2025-09-26 23:28:43 +02:00
Eskander Bejaoui
f32bf0cc3e nmap_tracker: Optimize default scan options (#153047) 2025-09-26 22:31:49 +02:00
peteS-UK
dbbe3145b6 Replace patch of entity_registry in test_config_flow for Squeezebox (#153039) 2025-09-26 22:17:47 +02:00
Erik Montnemery
f8bf3ea2ef Correct filter of target selector in motioneye services (#152971) 2025-09-26 22:08:19 +02:00
Bouwe Westerdijk
053bd31d43 Snapshot testing for Plugwise Switch platform (#153030) 2025-09-26 22:07:42 +02:00
DeerMaximum
1aefc3f37a NINA Use better wording for filters (#153050) 2025-09-26 22:05:10 +02:00
Joris Pelgröm
3de955d9ce Use UnitOfTime.DAYS instead of custom unit for LetPot number entity (#153054) 2025-09-26 21:58:17 +02:00
SapuSeven
0ff88fd366 Add None-check for VeSync fan device.state.display_status (#153055) 2025-09-26 21:57:01 +02:00
peteS-UK
eb84020773 Replace platform setup functions with fixtures with autouse in Squeezebox tests (#153057) 2025-09-26 21:49:58 +02:00
Tom
4bbfea3c7c Add SSL options during config_flow for airOS (#150325)
Co-authored-by: Åke Strandberg <ake@strandberg.eu>
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-26 21:38:27 +02:00
Josef Zweck
63d4fb7558 Ensure token validity in lamarzocco (#153058) 2025-09-26 21:36:03 +02:00
Artur Pragacz
953895cd81 Use satellite entity area in the assist pipeline (#153017) 2025-09-26 21:34:45 +02:00
Erwin Douna
a6c3f4efc0 Portainer add ability to skip SSL verification (#152955) 2025-09-26 21:32:49 +02:00
Paul Bottein
11e880d034 Update frontend to 20250926.0 (#153049) 2025-09-26 21:31:47 +02:00
Martin Hjelmare
e4d6bdb398 Fix Thread flow abort on multiple flows (#153048) 2025-09-26 18:48:51 +01:00
Andrew Jackson
6ced1783e3 Add discovery to Mealie (#151773)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-26 18:48:19 +02:00
Paulus Schoutsen
8051f78d10 Push ESPHome discovery to ZJS addon (#153004) 2025-09-26 10:12:56 -04:00
Josef Zweck
b724176b23 Bump pylamarzocco to 2.1.1 (#153027) 2025-09-26 15:46:24 +02:00
Erik Montnemery
fdca16ea92 Fix typing in ObjectSelectorConfig (#153043) 2025-09-26 15:18:18 +02:00
Stefan Agner
f8fd8b432a Update Home Assistant base image to 2025.09.2 (#153035) 2025-09-26 13:03:39 +02:00
lliwog
9148ae70ce Fix EZVIZ devices merging due to empty MAC addr (#152939) (#152981)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:47:11 +02:00
RogerSelwyn
447cb26d28 Protect against last_comms being None (#149366)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:35:04 +02:00
J. Nick Koston
2af36465f6 Bump aioesphomeapi to 41.11.0 (#153014) 2025-09-26 12:31:59 +02:00
dependabot[bot]
d5f7265424 Bump github/codeql-action from 3.30.3 to 3.30.4 (#153015)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-26 12:31:15 +02:00
Simone Chemelli
cc16af7f2d Code optimization for Uptime Robot (#153031) 2025-09-26 12:29:02 +02:00
Retha Runolfsson
7a4d75bc44 Add garage door opener for switchbot integration (#148460) 2025-09-26 12:11:59 +02:00
Bouwe Westerdijk
ec0380fd3b Snapshot testing for Plugwise Sensor platform (#153021) 2025-09-26 11:22:14 +02:00
Stefan Agner
b17cc71dfb Bump to home-assistant/wheels@2025.09.1 (#153025) 2025-09-26 11:04:02 +02:00
Erik Montnemery
89b327ed7b Remove device filter from target selector in bang_olufsen services (#152957) 2025-09-26 09:02:14 +02:00
Simone Chemelli
9bf361a1b8 Fix PIN failure if starting with 0 for Comelit SimpleHome (#152983) 2025-09-26 08:59:03 +02:00
J. Diego Rodríguez Royo
d11c171c75 Bump aiohomeconnect to version 0.20.0 (#153003) 2025-09-26 07:49:38 +02:00
puddly
c523c45d17 Allow ZHA discovery if discovery unique_id conflicts with config entry (#153009)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-26 07:39:00 +02:00
puddly
c1b9c0e1b6 Ignore discovery for existing ZHA entries (#152984) 2025-09-26 07:17:01 +02:00
puddly
487b9ff03e Bump ZHA to 0.0.73 (#153007) 2025-09-25 23:44:25 -04:00
Simone Chemelli
ec62b0cdfb Code optimization for Uptime Robot (#152993) 2025-09-26 00:34:09 +01:00
Brandon Harvey
6d0470064f Rename service to action in ESPHome (#152997) 2025-09-25 14:54:06 -05:00
Simone Chemelli
7450b3fd1a Improve tests for Alexa Devices (#152995) 2025-09-25 21:39:44 +02:00
Noah Husby
5b70910d77 Bump aiorussound to 4.8.2 (#152988) 2025-09-25 20:34:29 +02:00
Abílio Costa
52de5ff5ff Remove deprecated zone and event condition keys (#152986) 2025-09-25 19:23:40 +02:00
J. Nick Koston
c4389a1679 Bump aioesphomeapi to 41.10.0 (#152975)
Co-authored-by: Michael Hansen <mike@rhasspy.org>
2025-09-25 19:21:17 +02:00
Norbert Rittel
35faaa6cae Add missing square brackets to references in fully_kiosk actions (#152987) 2025-09-25 19:19:27 +02:00
Paul Bottein
3c0b13975a Update frontend to 20250925.1 (#152985) 2025-09-25 19:05:12 +02:00
Simone Chemelli
bc88696339 Remove deprecated sensors and update remaning for Alexa Devices (#151230) 2025-09-25 18:59:53 +02:00
Erik Montnemery
8f99c3f64a Remove device filter from target selector in lyric services (#152970) 2025-09-25 18:45:32 +02:00
Erik Montnemery
88016d96d4 Remove device and entity filter from target selector in homeassistant services (#152969) 2025-09-25 17:41:54 +01:00
Erik Montnemery
47df73b18f Remove device filter from target selector in google_mail services (#152968) 2025-09-25 18:32:12 +02:00
Maciej Bieniek
1c12d2b8cd Bump accuweather to version 4.2.2 (#152965) 2025-09-25 18:30:47 +02:00
Erik Montnemery
eb38837a8c Replace target selector with device selector in fully_kiosk services (#152959)
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-25 18:30:05 +02:00
Erik Montnemery
159c7fbfd1 Correct filter of target selector in sonos services (#152972) 2025-09-25 18:29:26 +02:00
Joost Lekkerkerker
7ee31f0884 Bump pySmartThings to 3.3.0 (#152977) 2025-09-25 17:57:30 +02:00
Daniel Potthast
0c5e12571a Update mvglive component (#146479)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-25 17:20:43 +02:00
Luke Lashley
9db973217f Fix incorrect Roborock test (#152980) 2025-09-25 17:18:24 +02:00
Artur Pragacz
cf1a745283 Move condition-specific fields into options (#152635) 2025-09-25 15:55:50 +02:00
peteS-UK
834e3f1963 Add HassKey for hass.data in Squeezebox (#149129) 2025-09-25 14:05:40 +02:00
Joakim Sørensen
3f8f7573c9 Bump hass-nabucasa from 1.1.1 to 1.1.2 (#152950) 2025-09-25 11:34:14 +01:00
Karsten Bade
0ae272f1f6 Add return types and docstring to sonos component (#152946) 2025-09-25 11:34:38 +02:00
Paul Bottein
8774295e2e Update frontend to 20250925.0 (#152945) 2025-09-25 11:33:01 +02:00
Erwin Douna
0c8d2594ef Portainer fix unique entity (#152941)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-25 09:49:22 +02:00
Simone Chemelli
205bd2676b Update IQS to platinum for Alexa Devices (#152905) 2025-09-25 09:45:50 +02:00
dependabot[bot]
25849fd9cc Bump actions/cache from 4.2.4 to 4.3.0 (#152934)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-25 09:43:03 +02:00
Sab44
7d6eac9ff7 Bump librehardwaremonitor-api to version 1.4.0 (#152938) 2025-09-25 09:42:31 +02:00
Luke Lashley
31017ebc98 Fix logical error when user has no Roborock maps (#152752) 2025-09-25 09:39:52 +02:00
Jimmy Zhening Luo
724a7b0ecc Quality: mark installation param doc as done (#152909) 2025-09-25 09:06:13 +02:00
Paulus Schoutsen
91e13d447a Prevent common control calling async methods from thread (#152931)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-09-24 23:09:54 -04:00
J. Nick Koston
7c8ad9d535 Fix ESPHome reauth not being triggered on incorrect password (#152911) 2025-09-24 22:27:40 -04:00
Franck Nijhof
9cd3ab853d Add block Spook < 4.0.0 as breaking Home Assistant (#152930) 2025-09-24 22:18:06 -04:00
Paulus Schoutsen
0b0f8c5829 Remove some more domains from common controls (#152927) 2025-09-24 22:15:29 -04:00
J. Nick Koston
ae7bc7fb1b Bump aioesphomeapi to 41.9.4 (#152923) 2025-09-24 19:16:48 -05:00
Franck Nijhof
09750872b5 Bump version to 2025.11.0dev0 (#152915) 2025-09-24 23:55:32 +02:00
Franck Nijhof
076e51017b Bump to home-assistant/wheels@2025.09.0 (#152920) 2025-09-24 23:12:20 +02:00
Simone Chemelli
95e7b00996 Update IQS to platinum for Comelit SimpleHome (#152906) 2025-09-24 22:03:31 +01:00
J. Nick Koston
ddecf1ac21 Bump aioesphomeapi to 41.9.3 to fix segfault (#152912) 2025-09-24 22:00:45 +01:00
571 changed files with 30,658 additions and 6,557 deletions


@@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}


@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.10"
HA_SHORT_VERSION: "2025.11"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Dependency review
uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
with:
license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true


@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:python"


@@ -160,7 +160,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.09.1
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.09.1
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2


@@ -203,6 +203,7 @@ homeassistant.components.feedreader.*
homeassistant.components.file_upload.*
homeassistant.components.filesize.*
homeassistant.components.filter.*
homeassistant.components.firefly_iii.*
homeassistant.components.fitbit.*
homeassistant.components.flexit_bacnet.*
homeassistant.components.flux_led.*

CODEOWNERS (generated)

@@ -492,6 +492,8 @@ build.json @home-assistant/supervisor
/tests/components/filesize/ @gjohansson-ST
/homeassistant/components/filter/ @dgomes
/tests/components/filter/ @dgomes
/homeassistant/components/firefly_iii/ @erwindouna
/tests/components/firefly_iii/ @erwindouna
/homeassistant/components/fireservicerota/ @cyberjunky
/tests/components/fireservicerota/ @cyberjunky
/homeassistant/components/firmata/ @DaAwesomeP
@@ -953,6 +955,8 @@ build.json @home-assistant/supervisor
/tests/components/met_eireann/ @DylanGore
/homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/homeassistant/components/meteo_lt/ @xE1H
/tests/components/meteo_lt/ @xE1H
/homeassistant/components/meteoalarm/ @rolfberkenbosch
/homeassistant/components/meteoclimatic/ @adrianmo
/tests/components/meteoclimatic/ @adrianmo


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@@ -616,34 +616,44 @@ async def async_enable_logging(
),
)
# Log errors to a file if we have write access to file or config dir
logger = logging.getLogger()
logger.setLevel(logging.INFO if verbose else logging.WARNING)
if log_file is None:
err_log_path = hass.config.path(ERROR_LOG_FILENAME)
default_log_path = hass.config.path(ERROR_LOG_FILENAME)
if "SUPERVISOR" in os.environ:
_LOGGER.info("Running in Supervisor, not logging to file")
# Rename the default log file if it exists, since previous versions created
# it even on Supervisor
if os.path.isfile(default_log_path):
with contextlib.suppress(OSError):
os.rename(default_log_path, f"{default_log_path}.old")
err_log_path = None
else:
err_log_path = default_log_path
else:
err_log_path = os.path.abspath(log_file)
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)
if err_log_path:
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger.addHandler(err_handler)
logger = logging.getLogger()
logger.addHandler(err_handler)
logger.setLevel(logging.INFO if verbose else logging.WARNING)
# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
else:
_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
else:
_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
async_activate_log_queue_handler(hass)


@@ -0,0 +1,5 @@
{
"domain": "eltako",
"name": "Eltako",
"iot_standards": ["matter"]
}


@@ -0,0 +1,5 @@
{
"domain": "konnected",
"name": "Konnected",
"integrations": ["konnected", "konnected_esphome"]
}


@@ -0,0 +1,5 @@
{
"domain": "level",
"name": "Level",
"iot_standards": ["matter"]
}


@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.1"]
"requirements": ["accuweather==4.2.2"]
}


@@ -4,10 +4,18 @@ from __future__ import annotations
from airos.airos8 import AirOS8
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [
@@ -21,13 +29,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(hass, verify_ssl=False)
session = async_get_clientsession(
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
)
airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -40,6 +51,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
return True
async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Migrate old config entry."""
if entry.version > 1:
# This means the user has downgraded from a future version
return False
if entry.version == 1 and entry.minor_version == 1:
new_data = {**entry.data}
advanced_data = {
CONF_SSL: DEFAULT_SSL,
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
}
new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
hass.config_entries.async_update_entry(
entry,
data=new_data,
minor_version=2,
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)


@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -14,11 +15,23 @@ from airos.exceptions import (
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DOMAIN
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOS8
_LOGGER = logging.getLogger(__name__)
@@ -28,6 +41,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
}
),
{"collapsed": True},
),
}
)
@@ -36,47 +58,109 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""
VERSION = 1
MINOR_VERSION = 2
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOS8
self.errors: dict[str, str] = {}
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
"""Handle the manual input of host and credentials."""
self.errors = {}
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(self.hass, verify_ssl=False)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()
validated_info = await self._validate_and_get_device_info(user_input)
if validated_info:
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
title=validated_info["title"],
data=validated_info["data"],
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
)
async def _validate_and_get_device_info(
self, config_data: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate user input with the device API."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
self.errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
self.errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
self.errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"title": airos_data.host.hostname, "data": config_data}
return None
async def async_step_reauth(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm(user_input)
async def async_step_reauth_confirm(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self.errors = {}
if user_input:
validate_data = {**self._get_reauth_entry().data, **user_input}
if await self._validate_and_get_device_info(config_data=validate_data):
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates=validate_data,
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
errors=self.errors,
)


@@ -7,3 +7,8 @@ DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)
MANUFACTURER = "Ubiquiti"
DEFAULT_VERIFY_SSL = False
DEFAULT_SSL = True
SECTION_ADVANCED_SETTINGS = "advanced_settings"


@@ -14,7 +14,7 @@ from airos.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except (AirOSConnectionAuthenticationError,) as err:
except AirOSConnectionAuthenticationError as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (


@@ -2,11 +2,11 @@
from __future__ import annotations
from homeassistant.const import CONF_HOST
from homeassistant.const import CONF_HOST, CONF_SSL
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSDataUpdateCoordinator
@@ -20,9 +20,14 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
super().__init__(coordinator)
airos_data = self.coordinator.data
url_schema = (
"https"
if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
else "http"
)
configuration_url: str | None = (
f"https://{coordinator.config_entry.data[CONF_HOST]}"
f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
)
self._attr_device_info = DeviceInfo(


@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.1"]
"requirements": ["airos==0.5.4"]
}


@@ -2,6 +2,14 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
@@ -12,6 +20,18 @@
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
},
"sections": {
"advanced_settings": {
"data": {
"ssl": "Use HTTPS",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"ssl": "Whether the connection should be encrypted (required for most devices)",
"verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
}
}
}
}
},
@@ -22,7 +42,9 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
}
},
"entity": {


@@ -114,6 +114,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
),
}
PARALLEL_UPDATES = 0
@callback
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:


@@ -22,6 +22,17 @@ class OAuth2FlowHandler(
VERSION = CONFIG_FLOW_VERSION
MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Check we have the cloud integration set up."""
if "cloud" not in self.hass.config.components:
return self.async_abort(
reason="cloud_not_enabled",
description_placeholders={"default_config": "default_config"},
)
return await super().async_step_user(user_input)
async def async_step_reauth(
self, user_input: Mapping[str, Any]
) -> ConfigFlowResult:


@@ -24,7 +24,8 @@
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
"cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"

View File

@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
@@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -31,6 +33,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_fn: Callable[[AmazonDevice, str], bool]
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True
BINARY_SENSORS: Final = (
@@ -41,46 +44,15 @@ BINARY_SENSORS: Final = (
is_on_fn=lambda device, _: device.online,
),
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, _: device.bluetooth_state,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="humanPresenceDetectionState",
key="detectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_on_fn=lambda device, key: bool(
device.sensors[key].value != SENSOR_STATE_OFF
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
),
),
)
@@ -94,6 +66,15 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
coordinator,
BINARY_SENSOR_DOMAIN,
"humanPresenceDetectionState",
"detectionState",
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -125,3 +106,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
return self.entity_description.is_on_fn(
self.device, self.entity_description.key
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"

View File

@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except (CannotAuthenticate, TypeError) as err:
except CannotAuthenticate as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",

View File

@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
"sensors": device.sensors,
}

View File

@@ -1,44 +1,4 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"

View File

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "silver",
"requirements": ["aioamazondevices==6.0.0"]
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
}

View File

@@ -31,6 +31,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
SENSORS: Final = (
@@ -99,3 +102,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -58,26 +58,6 @@
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": {
"speak": {
"name": "Speak"

View File

@@ -8,13 +8,17 @@ from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call
from .utils import alexa_api_call, async_update_unique_id
PARALLEL_UPDATES = 1
@@ -24,16 +28,17 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Alexa Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
subkey: str
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
method: str
SWITCHES: Final = (
AmazonSwitchEntityDescription(
key="do_not_disturb",
subkey="AUDIO_PLAYER",
key="dnd",
translation_key="do_not_disturb",
is_on_fn=lambda _device: _device.do_not_disturb,
is_on_fn=lambda device: bool(device.sensors["dnd"].value),
method="set_do_not_disturb",
),
)
@@ -48,6 +53,11 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "DND" switch and remove from Speaker Group
await async_update_unique_id(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -59,7 +69,7 @@ async def async_setup_entry(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in new_devices
if switch_desc.subkey in coordinator.data[serial_num].capabilities
if switch_desc.key in coordinator.data[serial_num].sensors
)
_check_device()
@@ -94,3 +104,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
def is_on(self) -> bool:
"""Return True if switch is on."""
return self.entity_description.is_on_fn(self.device)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -6,9 +6,12 @@ from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
from .const import DOMAIN
from .const import _LOGGER, DOMAIN
from .coordinator import AmazonDevicesCoordinator
from .entity import AmazonEntity
@@ -38,3 +41,23 @@ def alexa_api_call[_T: AmazonEntity, **_P](
) from err
return cmd_wrapper
async def async_update_unique_id(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
domain: str,
old_key: str,
new_key: str,
) -> None:
"""Update unique id for entities created with old format."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{old_key}"
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
_LOGGER.debug("Updating unique_id for %s", entity_id)
new_unique_id = unique_id.replace(old_key, new_key)
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)

View File
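The `async_update_unique_id` helper above is what lets the Alexa Devices platforms rename sensor keys (for example `humanPresenceDetectionState` to `detectionState`) without orphaning existing entities. A minimal sketch of the registry lookup-and-rename pattern it relies on; the function name and the "my_integration" domain below are illustrative placeholders, not code from the integration:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

def rename_unique_id(
    hass: HomeAssistant, platform_domain: str, old_unique_id: str, new_unique_id: str
) -> None:
    # Sketch only. Look up the entity by (platform domain, integration domain, unique_id)...
    registry = er.async_get(hass)
    entity_id = registry.async_get_entity_id(platform_domain, "my_integration", old_unique_id)
    if entity_id is not None:
        # ...and rewrite the unique_id in place; the entity_id and user settings are preserved.
        registry.async_update_entity(entity_id, new_unique_id=new_unique_id)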

@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
async def async_devices_payload(hass: HomeAssistant) -> dict:
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"""Return detailed information about entities and devices."""
dev_reg = dr.async_get(hass)
ent_reg = er.async_get(hass)
@@ -513,6 +513,8 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
integration_configs: dict[str, AnalyticsModifications] = {}
removed_devices: set[str] = set()
# Get device list
for device_entry in dev_reg.devices.values():
if not device_entry.primary_config_entry:
@@ -525,6 +527,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
if config_entry is None:
continue
if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
removed_devices.add(device_entry.id)
continue
integration_domain = config_entry.domain
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -551,7 +557,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
for domain, integration_info in integration_inputs.items()
if (integration := integrations.get(domain)) is not None
and integration.is_built_in
and integration.integration_type in ("device", "hub")
and integration.manifest.get("integration_type") in ("device", "hub")
}
# Call integrations that implement the analytics platform
@@ -614,11 +620,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
device_config = integration_config.devices.get(device_id, device_config)
if device_config.remove:
removed_devices.add(device_id)
continue
device_entry = dev_reg.devices[device_id]
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
device_id_mapping[device_id] = (integration_domain, len(devices_info))
devices_info.append(
{
@@ -669,7 +676,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
entity_entry = ent_reg.entities[entity_id]
entity_state = hass.states.get(entity_entry.entity_id)
entity_state = hass.states.get(entity_id)
entity_info = {
# LIMITATION: `assumed_state` can be overridden by users;
@@ -690,15 +697,19 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
"unit_of_measurement": entity_entry.unit_of_measurement,
}
if (
((device_id_ := entity_entry.device_id) is not None)
and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
and (new_device_id[0] == integration_domain)
):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
else:
entities_info.append(entity_info)
if (device_id_ := entity_entry.device_id) is not None:
if device_id_ in removed_devices:
# The device was removed, so we remove the entity too
continue
if (
new_device_id := device_id_mapping.get(device_id_)
) is not None and (new_device_id[0] == integration_domain):
device_info = devices_info[new_device_id[1]]
device_info["entities"].append(entity_info)
continue
entities_info.append(entity_info)
return {
"version": "home-assistant:1",

View File

@@ -1308,7 +1308,9 @@ class PipelineRun:
# instead of a full response.
all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area(
conversation_result.response, self._device_id
conversation_result.response,
self._satellite_id,
self._device_id,
)
)
@@ -1337,39 +1339,62 @@ class PipelineRun:
return (speech, all_targets_in_satellite_area)
def _get_all_targets_in_satellite_area(
self, intent_response: intent.IntentResponse, device_id: str | None
self,
intent_response: intent.IntentResponse,
satellite_id: str | None,
device_id: str | None,
) -> bool:
"""Return true if all targeted entities were in the same area as the device."""
if (
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or (not intent_response.matched_states)
or (not device_id)
):
return False
device_registry = dr.async_get(self.hass)
if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
intent_response.response_type != intent.IntentResponseType.ACTION_DONE
or not intent_response.matched_states
):
return False
entity_registry = er.async_get(self.hass)
for state in intent_response.matched_states:
entity = entity_registry.async_get(state.entity_id)
if not entity:
device_registry = dr.async_get(self.hass)
area_id: str | None = None
if (
satellite_id is not None
and (target_entity_entry := entity_registry.async_get(satellite_id))
is not None
):
area_id = target_entity_entry.area_id
device_id = target_entity_entry.device_id
if area_id is None:
if device_id is None:
return False
if (entity_area_id := entity.area_id) is None:
if (entity.device_id is None) or (
(entity_device := device_registry.async_get(entity.device_id))
is None
):
device_entry = device_registry.async_get(device_id)
if device_entry is None:
return False
area_id = device_entry.area_id
if area_id is None:
return False
for state in intent_response.matched_states:
target_entity_entry = entity_registry.async_get(state.entity_id)
if target_entity_entry is None:
return False
target_area_id = target_entity_entry.area_id
if target_area_id is None:
if target_entity_entry.device_id is None:
return False
entity_area_id = entity_device.area_id
target_device_entry = device_registry.async_get(
target_entity_entry.device_id
)
if target_device_entry is None:
return False
if entity_area_id != device.area_id:
target_area_id = target_device_entry.area_id
if target_area_id != area_id:
return False
return True

View File
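The rewritten `_get_all_targets_in_satellite_area` above resolves the satellite's area by preferring the satellite entity's own `area_id` and only then falling back to the area of its device, and applies the same entity-then-device fallback to every matched target. A compact sketch of that fallback on its own; `resolve_area` is a hypothetical helper, not part of the pipeline code:

from homeassistant.helpers import device_registry as dr, entity_registry as er

def resolve_area(
    ent_reg: er.EntityRegistry, dev_reg: dr.DeviceRegistry, entity_id: str
) -> str | None:
    entry = ent_reg.async_get(entity_id)
    if entry is None:
        return None
    if entry.area_id is not None:
        # The entity has an area assigned directly.
        return entry.area_id
    if entry.device_id is None:
        return None
    device = dev_reg.async_get(entry.device_id)
    # Otherwise fall back to the area of the device the entity belongs to, if any.
    return device.area_id if device else None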

@@ -2,9 +2,7 @@
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
from typing import Any
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
@@ -36,7 +34,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy(raw: T) -> T:
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:

View File
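The hunk above drops the module-level `TypeVar` in favour of PEP 695 type-parameter syntax (Python 3.12+), declaring the constrained type variable inline on the function. A standalone illustration of the same pattern; `passthrough` is an example function, not the integration's code:

from typing import Any

def passthrough[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    # The inline [T: (...)] constraint replaces
    # T = TypeVar("T", dict[str, Any], list[Any], None) at module level.
    if raw is None:
        return None
    if isinstance(raw, dict):
        return {key: value for key, value in raw.items()}
    return list(raw)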

@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
from . import util
from .agent import BackupAgent
@@ -144,7 +145,7 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND)
else:
stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))
worker_done_event = asyncio.Event()
@@ -152,7 +153,7 @@ class DownloadBackupView(HomeAssistantView):
"""Call by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set)
stream = util.AsyncIteratorWriter(hass)
stream = AsyncIteratorWriter(hass.loop)
worker = threading.Thread(
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,6 +38,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import util as backup_util
from .agent import (
@@ -72,7 +73,6 @@ from .models import (
)
from .store import BackupStore
from .util import (
AsyncIteratorReader,
DecryptedBackupStreamer,
EncryptedBackupStreamer,
make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
else:
backup_stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
try:
await self.hass.async_add_executor_job(
validate_password_stream, reader, password

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -14,7 +13,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
import threading
from typing import IO, Any, Self, cast
from typing import IO, Any, cast
import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -23,6 +22,11 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.async_iterator import (
Abort,
AsyncIteratorReader,
AsyncIteratorWriter,
)
from homeassistant.util.json import JsonObjectType, json_loads_object
from .const import BUF_SIZE, LOGGER
@@ -59,12 +63,6 @@ class BackupEmpty(DecryptError):
_message = "No tar files found in the backup."
class AbortCipher(HomeAssistantError):
"""Abort the cipher operation."""
_message = "Abort cipher operation."
def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True)
@@ -166,106 +164,6 @@ def validate_password(path: Path, password: str | None) -> bool:
return False
class AsyncIteratorReader:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
def abort(self) -> None:
"""Abort the reader."""
self._aborted = True
if self._next_future is not None:
self._next_future.cancel()
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
)
if self._aborted:
self._next_future.cancel()
raise AbortCipher
try:
self._buffer = self._next_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)
def close(self) -> None:
"""Close the iterator."""
class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
return self
async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration
def abort(self) -> None:
"""Abort the writer."""
self._aborted = True
if self._write_future is not None:
self._write_future.cancel()
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
self._write_future = asyncio.run_coroutine_threadsafe(
self._queue.put(s), self._hass.loop
)
if self._aborted:
self._write_future.cancel()
raise AbortCipher
try:
self._write_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos += len(s)
return len(s)
def validate_password_stream(
input_stream: IO[bytes],
password: str | None,
@@ -342,7 +240,7 @@ def decrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except AbortCipher:
except Abort:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -430,7 +328,7 @@ def encrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except AbortCipher:
except Abort:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -557,8 +455,8 @@ class _CipherBackupStreamer:
self._hass.loop.call_soon_threadsafe(worker_status.done.set)
stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass)
reader = AsyncIteratorReader(self._hass.loop, stream)
writer = AsyncIteratorWriter(self._hass.loop)
worker = threading.Thread(
target=self._cipher_func,
args=[

View File
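The large removal above relocates `AsyncIteratorReader`, `AsyncIteratorWriter`, and the abort exception (now `Abort`) into the shared `homeassistant.util.async_iterator` module, with the constructors taking the event loop rather than the `hass` object. The core bridging idea, reduced to a toy sketch that only needs the loop; this is not the real helper's implementation, which also handles aborts and honours the requested read size:

import asyncio
from collections.abc import AsyncIterator

class LoopBackedReader:
    """Toy file-like reader that pulls chunks from an async byte iterator."""

    def __init__(self, loop: asyncio.AbstractEventLoop, stream: AsyncIterator[bytes]) -> None:
        self._loop = loop
        self._stream = stream

    async def _next(self) -> bytes:
        return await anext(self._stream, b"")

    def read(self, n: int = -1, /) -> bytes:
        # Called from a worker thread: schedule the coroutine on the loop and
        # block until the next chunk (or b"" at end of stream) arrives.
        return asyncio.run_coroutine_threadsafe(self._next(), self._loop).result()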

@@ -73,11 +73,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
# Add the websocket and API client
entry.runtime_data = BangOlufsenData(websocket, client)
# Start WebSocket connection
await client.connect_notifications(remote_control=True, reconnect=True)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Start WebSocket connection once the platforms have been loaded.
# This ensures that the initial WebSocket notifications are dispatched to entities
await client.connect_notifications(remote_control=True, reconnect=True)
return True

View File

@@ -125,7 +125,8 @@ async def async_setup_entry(
async_add_entities(
new_entities=[
BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
]
],
update_before_add=True,
)
# Register actions.
@@ -266,34 +267,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._software_status.software_version,
)
# Get overall device state once. This is handled by WebSocket events the rest of the time.
product_state = await self._client.get_product_state()
# Get volume information.
if product_state.volume:
self._volume = product_state.volume
# Get all playback information.
# Ensure that the metadata is not None upon startup
if product_state.playback:
if product_state.playback.metadata:
self._playback_metadata = product_state.playback.metadata
self._remote_leader = product_state.playback.metadata.remote_leader
if product_state.playback.progress:
self._playback_progress = product_state.playback.progress
if product_state.playback.source:
self._source_change = product_state.playback.source
if product_state.playback.state:
self._playback_state = product_state.playback.state
# Set initial state
if self._playback_state.value:
self._state = self._playback_state.value
self._attr_media_position_updated_at = utcnow()
# Get the highest resolution available of the given images.
self._media_image = get_highest_resolution_artwork(self._playback_metadata)
# If the device has been updated with new sources, then the API will fail here.
await self._async_update_sources()

View File

@@ -3,16 +3,12 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -37,8 +33,6 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -71,16 +65,12 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false

View File

@@ -272,6 +272,13 @@ async def async_setup_entry(
observations: list[ConfigType] = [
dict(subentry.data) for subentry in config_entry.subentries.values()
]
for observation in observations:
if observation[CONF_PLATFORM] == CONF_TEMPLATE:
observation[CONF_VALUE_TEMPLATE] = Template(
observation[CONF_VALUE_TEMPLATE], hass
)
prior: float = config[CONF_PRIOR]
probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)

View File

@@ -315,9 +315,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component))
frontend.async_register_built_in_panel(
hass, "calendar", "calendar", "hass:calendar"
)
frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")
websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -51,12 +51,6 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval
@@ -118,12 +112,6 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"
# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity."""
@@ -1117,11 +1105,3 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK],
)
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

View File

@@ -53,7 +53,6 @@ from .const import (
CONF_ACME_SERVER,
CONF_ALEXA,
CONF_ALIASES,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG,
CONF_FILTER,
@@ -130,7 +129,6 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,7 +78,6 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.1"],
"requirements": ["hass-nabucasa==1.2.0"],
"single_config_entry": true
}

View File

@@ -0,0 +1,106 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home` and reusing result.
Extract `async_setup_entry` into own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors
config-flow:
status: todo
comment: |
The config flow misses data descriptions.
Remove URLs from data descriptions; they should be replaced with placeholders.
Make use of Electricity Maps zone keys in country code as dropdown.
Make use of location selector for coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of assert await `async_setup_component(hass, DOMAIN, {})`
`test_sensor` could use `snapshot_platform`
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connects to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -25,23 +25,27 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = 111111
DEFAULT_PIN = "111111"
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
}
)

View File
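The Comelit schemas above switch the PIN from `cv.positive_int` to a string validated with `cv.matches_regex`, so submitted values keep their exact digits (including leading zeros) instead of being coerced to an integer. A quick illustration of what the new validator accepts; the schema below is a standalone example, not the config flow itself:

import voluptuous as vol
from homeassistant.helpers import config_validation as cv

pin_schema = vol.Schema({vol.Required("pin"): cv.matches_regex(r"^[0-9]{4,10}$")})
pin_schema({"pin": "001234"})  # accepted, and the leading zeros are preserved
# pin_schema({"pin": "12ab"}) would raise vol.Invalid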

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "silver",
"quality_scale": "platinum",
"requirements": ["aiocomelit==0.12.3"]
}

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the config component."""
frontend.async_register_built_in_panel(
hass, "config", "config", "hass:cog", require_admin=True
hass, "config", "config", "mdi:cog", require_admin=True
)
for panel in SECTIONS:

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from http import HTTPStatus
import logging
from typing import Any, NoReturn
from aiohttp import web
@@ -23,7 +24,12 @@ from homeassistant.helpers.data_entry_flow import (
FlowManagerResourceView,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.json import json_fragment
from homeassistant.helpers.json import (
JSON_DUMP,
find_paths_unserializable_data,
json_bytes,
json_fragment,
)
from homeassistant.loader import (
Integration,
IntegrationNotFound,
@@ -31,6 +37,9 @@ from homeassistant.loader import (
async_get_integrations,
async_get_loaded_integration,
)
from homeassistant.util.json import format_unserializable_data
_LOGGER = logging.getLogger(__name__)
@callback
@@ -402,18 +411,40 @@ def config_entries_flow_subscribe(
connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
async_on_flow_init_remove
)
connection.send_message(
websocket_api.event_message(
msg["id"],
[
{"type": None, "flow_id": flw["flow_id"], "flow": flw}
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
try:
serialized_flows = [
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
)
]
except (ValueError, TypeError):
# If we can't serialize, we'll filter out unserializable flows
serialized_flows = []
for flw in hass.config_entries.flow.async_progress():
if flw["context"]["source"] in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
):
continue
try:
serialized_flows.append(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
)
],
except (ValueError, TypeError):
_LOGGER.error(
"Unable to serialize to JSON. Bad data found at %s",
format_unserializable_data(
find_paths_unserializable_data(flw, dump=JSON_DUMP)
),
)
continue
connection.send_message(
websocket_api.messages.construct_event_message(
msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
)
)
connection.send_result(msg["id"])

View File
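Because the hunk above interleaves removed and added lines, the new control flow is easier to see in isolation: serialize every in-progress flow in one pass, and only if that raises fall back to serializing flow by flow, logging and skipping the ones carrying unserializable data. A generic sketch of the pattern using the standard library rather than the websocket helpers:

import json
import logging
from typing import Any

_LOGGER = logging.getLogger(__name__)

def serialize_all(items: list[Any]) -> list[str]:
    try:
        # Fast path: assume every item serializes cleanly.
        return [json.dumps(item) for item in items]
    except (ValueError, TypeError):
        serialized: list[str] = []
        for item in items:
            try:
                serialized.append(json.dumps(item))
            except (ValueError, TypeError):
                # Report and skip the offending item instead of dropping the whole batch.
                _LOGGER.error("Unable to serialize item: %r", item)
        return serialized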

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
}

View File

@@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SOURCE: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_handle_source_entity_changes(
@@ -46,15 +47,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File

@@ -140,6 +140,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1
MINOR_VERSION = 4

View File
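Taken together, the two derivative hunks above (and the matching filter change further down) replace a hand-written update listener that reloaded the config entry with the `options_flow_reloads` flag on the schema-based flow handler. A minimal sketch of the resulting handler shape; `DOMAIN`, `CONFIG_FLOW`, and `OPTIONS_FLOW` stand in for an integration's own definitions:

from homeassistant.helpers.schema_config_entry_flow import SchemaConfigFlowHandler

class ExampleConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
    # With options_flow_reloads set, the handler reloads the entry after the
    # options flow finishes, so no add_update_listener/async_reload pair is needed.
    config_flow = CONFIG_FLOW
    options_flow = OPTIONS_FLOW
    options_flow_reloads = True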

@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
from ipaddress import IPv4Address, IPv6Address
import logging
@@ -55,16 +56,16 @@ async def async_setup_entry(
hostname = entry.data[CONF_HOSTNAME]
name = entry.data[CONF_NAME]
resolver_ipv4 = entry.options[CONF_RESOLVER]
resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
nameserver_ipv4 = entry.options[CONF_RESOLVER]
nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
port_ipv4 = entry.options[CONF_PORT]
port_ipv6 = entry.options[CONF_PORT_IPV6]
entities = []
if entry.data[CONF_IPV4]:
entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
if entry.data[CONF_IPV6]:
entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))
async_add_entities(entities, update_before_add=True)
@@ -76,11 +77,13 @@ class WanIpSensor(SensorEntity):
_attr_translation_key = "dnsip"
_unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})
resolver: aiodns.DNSResolver
def __init__(
self,
name: str,
hostname: str,
resolver: str,
nameserver: str,
ipv6: bool,
port: int,
) -> None:
@@ -88,12 +91,12 @@ class WanIpSensor(SensorEntity):
self._attr_name = "IPv6" if ipv6 else None
self._attr_unique_id = f"{hostname}_{ipv6}"
self.hostname = hostname
self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
self.resolver.nameservers = [resolver]
self.port = port
self.nameserver = nameserver
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
self._retries = DEFAULT_RETRIES
self._attr_extra_state_attributes = {
"resolver": resolver,
"resolver": nameserver,
"querytype": self.querytype,
}
self._attr_device_info = DeviceInfo(
@@ -103,14 +106,26 @@ class WanIpSensor(SensorEntity):
model=aiodns.__version__,
name=name,
)
self.create_dns_resolver()
def create_dns_resolver(self) -> None:
"""Create the DNS resolver."""
self.resolver = aiodns.DNSResolver(
nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
)
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""
if self.resolver._closed: # noqa: SLF001
self.create_dns_resolver()
response = None
try:
response = await self.resolver.query(self.hostname, self.querytype)
async with asyncio.timeout(10):
response = await self.resolver.query(self.hostname, self.querytype)
except TimeoutError:
await self.resolver.close()
except DNSError as err:
_LOGGER.warning("Exception while resolving host: %s", err)
response = None
if response:
sorted_ips = sort_ips(

View File

@@ -116,7 +116,11 @@ class EbusdData:
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.write(self._address, self._circuit, name, value)
if command_result is not None and "done" not in command_result:
if (
command_result is not None
and "done" not in command_result
and "empty" not in command_result
):
_LOGGER.warning("Write command failed: %s", name)
except RuntimeError as err:
_LOGGER.error(err)

View File

@@ -176,7 +176,7 @@
"description": "Sets the participating sensors for a climate program.",
"fields": {
"preset_mode": {
"name": "Climate Name",
"name": "Climate program",
"description": "Name of the climate program to set the sensors active on.\nDefaults to currently active program."
},
"device_ids": {
@@ -188,7 +188,7 @@
},
"exceptions": {
"invalid_preset": {
"message": "Invalid climate name, available options are: {options}"
"message": "Invalid climate program, available options are: {options}"
},
"invalid_sensor": {
"message": "Invalid sensor for thermostat, available options are: {options}"

View File

@@ -69,7 +69,9 @@ class EcovacsMap(
await super().async_added_to_hass()
async def on_info(event: CachedMapInfoEvent) -> None:
self._attr_extra_state_attributes["map_name"] = event.name
for map_obj in event.maps:
if map_obj.using:
self._attr_extra_state_attributes["map_name"] = map_obj.name
async def on_changed(event: MapChangedEvent) -> None:
self._attr_image_last_updated = event.when

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
}

View File

@@ -2,3 +2,4 @@ raw_get_positions:
target:
entity:
domain: vacuum
integration: ecovacs

View File

@@ -6,5 +6,5 @@
"dependencies": ["webhook"],
"documentation": "https://www.home-assistant.io/integrations/ecowitt",
"iot_class": "local_push",
"requirements": ["aioecowitt==2025.9.1"]
"requirements": ["aioecowitt==2025.9.2"]
}

View File

@@ -3,14 +3,15 @@
from __future__ import annotations
from datetime import timedelta
from enum import IntEnum
import logging
from typing import Any
from pyephember2.pyephember2 import (
EphEmber,
ZoneMode,
boiler_state,
zone_current_temperature,
zone_is_active,
zone_is_hotwater,
zone_mode,
zone_name,
@@ -53,6 +54,15 @@ EPH_TO_HA_STATE = {
"OFF": HVACMode.OFF,
}
class EPHBoilerStates(IntEnum):
"""Boiler states for a zone given by the api."""
FIXME = 0
OFF = 1
ON = 2
HA_STATE_TO_EPH = {value: key for key, value in EPH_TO_HA_STATE.items()}
@@ -123,7 +133,7 @@ class EphEmberThermostat(ClimateEntity):
@property
def hvac_action(self) -> HVACAction:
"""Return current HVAC action."""
if zone_is_active(self._zone):
if boiler_state(self._zone) == EPHBoilerStates.ON:
return HVACAction.HEATING
return HVACAction.IDLE

View File

@@ -0,0 +1,11 @@
"""Analytics platform."""
from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
from homeassistant.core import HomeAssistant
async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
return AnalyticsModifications(remove=True)

View File

@@ -57,6 +57,7 @@ from .manager import async_replace_device
ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
_LOGGER = logging.getLogger(__name__)
ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="
@@ -137,6 +138,11 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._password = ""
return await self._async_authenticate_or_add()
if error == ERROR_INVALID_PASSWORD_AUTH or (
error is None and self._device_info and self._device_info.uses_password
):
return await self.async_step_authenticate()
if error is None and entry_data.get(CONF_NOISE_PSK):
# Device was configured with encryption but now connects without it.
# Check if it's the same device before offering to remove encryption.
@@ -690,13 +696,15 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
cli = APIClient(
host,
port or DEFAULT_PORT,
"",
self._password or "",
zeroconf_instance=zeroconf_instance,
noise_psk=noise_psk,
)
try:
await cli.connect()
self._device_info = await cli.device_info()
except InvalidAuthAPIError:
return ERROR_INVALID_PASSWORD_AUTH
except RequiresEncryptionAPIError:
return ERROR_REQUIRES_ENCRYPTION_KEY
except InvalidEncryptionKeyAPIError as ex:

View File

@@ -372,6 +372,9 @@ class ESPHomeManager:
"""Subscribe to states and list entities on successful API login."""
try:
await self._on_connect()
except InvalidAuthAPIError as err:
_LOGGER.warning("Authentication failed for %s: %s", self.host, err)
await self._start_reauth_and_disconnect()
except APIConnectionError as err:
_LOGGER.warning(
"Error getting setting up connection for %s: %s", self.host, err
@@ -641,7 +644,14 @@ class ESPHomeManager:
if self.reconnect_logic:
await self.reconnect_logic.stop()
return
await self._start_reauth_and_disconnect()
async def _start_reauth_and_disconnect(self) -> None:
"""Start reauth flow and stop reconnection attempts."""
self.entry.async_start_reauth(self.hass)
await self.cli.disconnect()
if self.reconnect_logic:
await self.reconnect_logic.stop()
async def _handle_dynamic_encryption_key(
self, device_info: EsphomeDeviceInfo
@@ -1063,7 +1073,7 @@ def _async_register_service(
service_name,
{
"description": (
f"Calls the service {service.name} of the node {device_info.name}"
f"Performs the action {service.name} of the node {device_info.name}"
),
"fields": fields,
},

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.9.0",
"aioesphomeapi==41.11.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],

View File

@@ -26,11 +26,14 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
super().__init__(coordinator)
self._serial = serial
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
connections=connections,
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],
@@ -62,11 +65,14 @@ class EzvizBaseEntity(Entity):
self._serial = serial
self.coordinator = coordinator
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
connections=connections,
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],

View File

@@ -10,7 +10,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Filter from a config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -18,8 +17,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Filter config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -246,6 +246,7 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -0,0 +1,27 @@
"""The Firefly III integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Set up Firefly III from a config entry."""
coordinator = FireflyDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -0,0 +1,97 @@
"""Config flow for the Firefly III integration."""
from __future__ import annotations
import logging
from typing import Any
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_URL): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
vol.Required(CONF_API_KEY): str,
}
)
async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect."""
try:
client = Firefly(
api_url=data[CONF_URL],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await client.get_about()
except FireflyAuthenticationError:
raise InvalidAuth from None
except FireflyConnectionError as err:
raise CannotConnect from err
except FireflyTimeoutError as err:
raise FireflyClientTimeout from err
return True
class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Firefly III."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class FireflyClientTimeout(HomeAssistantError):
"""Error to indicate a timeout occurred."""

View File

@@ -0,0 +1,6 @@
"""Constants for the Firefly III integration."""
DOMAIN = "firefly_iii"
MANUFACTURER = "Firefly III"
NAME = "Firefly III"

View File

@@ -0,0 +1,137 @@
"""Data Update Coordinator for Firefly III integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from aiohttp import CookieJar
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator]
DEFAULT_SCAN_INTERVAL = timedelta(minutes=5)
@dataclass
class FireflyCoordinatorData:
"""Data structure for Firefly III coordinator data."""
accounts: list[Account]
categories: list[Category]
category_details: list[Category]
budgets: list[Budget]
bills: list[Bill]
primary_currency: Currency
class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]):
"""Coordinator to manage data updates for Firefly III integration."""
config_entry: FireflyConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.firefly = Firefly(
api_url=self.config_entry.data[CONF_URL],
api_key=self.config_entry.data[CONF_API_KEY],
session=async_create_clientsession(
self.hass,
self.config_entry.data[CONF_VERIFY_SSL],
cookie_jar=CookieJar(unsafe=True),
),
)
async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
await self.firefly.get_about()
except FireflyAuthenticationError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> FireflyCoordinatorData:
"""Fetch data from Firefly III API."""
now = datetime.now()
start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
end_date = now
try:
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets()
bills = await self.firefly.get_bills()
except FireflyAuthenticationError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
return FireflyCoordinatorData(
accounts=accounts,
categories=categories,
category_details=category_details,
budgets=budgets,
bills=bills,
primary_currency=primary_currency,
)

View File

@@ -0,0 +1,40 @@
"""Base entity for Firefly III integration."""
from __future__ import annotations
from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import FireflyDataUpdateCoordinator
class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]):
"""Base class for Firefly III entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize a Firefly entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
configuration_url=URL(coordinator.config_entry.data[CONF_URL]),
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.entity_description.key}",
)
},
)

View File

@@ -0,0 +1,18 @@
{
"entity": {
"sensor": {
"account_type": {
"default": "mdi:bank",
"state": {
"expense": "mdi:cash-minus",
"revenue": "mdi:cash-plus",
"asset": "mdi:account-cash",
"liability": "mdi:hand-coin"
}
},
"category": {
"default": "mdi:label"
}
}
}
}

View File

@@ -0,0 +1,10 @@
{
"domain": "firefly_iii",
"name": "Firefly III",
"codeowners": ["@erwindouna"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/firefly_iii",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.6"]
}

View File

@@ -0,0 +1,68 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
No explicit parallel updates are defined.
reauthentication-flow:
status: todo
comment: |
No reauthentication flow is defined. It will be added in a future iteration.
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -0,0 +1,142 @@
"""Sensor platform for Firefly III integration."""
from __future__ import annotations
from pyfirefly.models import Account, Category
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.components.sensor.const import SensorDeviceClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
from .entity import FireflyBaseEntity
ACCOUNT_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="account_type",
translation_key="account",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
CATEGORY_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="category",
translation_key="category",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: FireflyConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Firefly III sensor platform."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
FireflyAccountEntity(
coordinator=coordinator,
entity_description=description,
account=account,
)
for account in coordinator.data.accounts
for description in ACCOUNT_SENSORS
]
entities.extend(
FireflyCategoryEntity(
coordinator=coordinator,
entity_description=description,
category=category,
)
for category in coordinator.data.category_details
for description in CATEGORY_SENSORS
)
async_add_entities(entities)
class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III account."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
account: Account,
) -> None:
"""Initialize Firefly account entity."""
super().__init__(coordinator, entity_description)
self._account = account
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{account.id}"
self._attr_name = account.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
# Account type state doesn't map cleanly onto icons.json; this needs a follow-up fix.
if account.attributes.type == "expense":
self._attr_icon = "mdi:cash-minus"
elif account.attributes.type == "asset":
self._attr_icon = "mdi:account-cash"
elif account.attributes.type == "revenue":
self._attr_icon = "mdi:cash-plus"
elif account.attributes.type == "liability":
self._attr_icon = "mdi:hand-coin"
else:
self._attr_icon = "mdi:bank"
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self._account.attributes.current_balance
@property
def extra_state_attributes(self) -> dict[str, str] | None:
"""Return extra state attributes for the account entity."""
return {
"account_role": self._account.attributes.account_role or "",
"account_type": self._account.attributes.type or "",
"current_balance": str(self._account.attributes.current_balance or ""),
}
class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III category."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
category: Category,
) -> None:
"""Initialize Firefly category entity."""
super().__init__(coordinator, entity_description)
self._category = category
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{category.id}"
self._attr_name = category.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
spent_items = self._category.attributes.spent or []
earned_items = self._category.attributes.earned or []
spent = sum(float(item.sum) for item in spent_items if item.sum is not None)
earned = sum(float(item.sum) for item in earned_items if item.sum is not None)
if spent == 0 and earned == 0:
return None
return spent + earned

View File

@@ -0,0 +1,39 @@
{
"config": {
"step": {
"user": {
"data": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "[%key:common::config_flow::data::api_key%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "The API key for authenticating with Firefly",
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while trying to connect to the Firefly instance: {error}"
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
}
}
}

View File

@@ -46,6 +46,9 @@ async def async_get_config_entry_diagnostics(
}
for _, device in avm_wrapper.devices.items()
],
"cpu_temperatures": await hass.async_add_executor_job(
avm_wrapper.fritz_status.get_cpu_temperatures
),
"wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
},
}

View File

@@ -459,7 +459,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"developer-tools",
require_admin=True,
sidebar_title="developer_tools",
sidebar_icon="hass:hammer",
sidebar_icon="mdi:hammer",
)
@callback

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250924.0"]
"requirements": ["home-assistant-frontend==20251001.0"]
}

View File

@@ -1,8 +1,10 @@
load_url:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
url:
example: "https://home-assistant.io"
required: true
@@ -10,10 +12,12 @@ load_url:
text:
set_config:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
key:
example: "motionSensitivity"
required: true
@@ -26,12 +30,14 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk

View File

@@ -147,6 +147,10 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -157,6 +161,10 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -174,6 +182,10 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -108,6 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidifier,
@@ -140,6 +141,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -148,7 +150,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -186,11 +187,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -96,6 +96,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HEATER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the heater, but
@@ -67,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -113,11 +114,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -104,6 +104,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):
async def async_update(self) -> None:
"""Update an entity's state data."""
if "_state" in self._device.data: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(
self._device.data["_state"]["lastComms"]
)
if (state := self._device.data.get("_state")) and (
last_comms := state.get("lastComms")
) is not None: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(last_comms)
class GeniusZone(GeniusEntity):

View File

@@ -1,7 +1,5 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -141,15 +141,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await hass.config_entries.async_forward_entry_setups(
entry, (entry.options["group_type"],)
)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -329,6 +329,7 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
@callback
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:

View File

@@ -1,5 +1,7 @@
"""Coordinator module for managing Growatt data fetching."""
from __future__ import annotations
import datetime
import json
import logging
@@ -145,7 +147,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return self.data.get("currency")
def get_data(
self, entity_description: "GrowattSensorEntityDescription"
self, entity_description: GrowattSensorEntityDescription
) -> str | int | float | None:
"""Get the data."""
variable = entity_description.api_key

View File

@@ -4,9 +4,14 @@ from uuid import UUID
from habiticalib import Habitica
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
@@ -27,6 +32,7 @@ PLATFORMS = [
Platform.BUTTON,
Platform.CALENDAR,
Platform.IMAGE,
Platform.NOTIFY,
Platform.SENSOR,
Platform.SWITCH,
Platform.TODO,
@@ -46,6 +52,7 @@ async def async_setup_entry(
"""Set up habitica from a config entry."""
party_added_by_this_entry: UUID | None = None
device_reg = dr.async_get(hass)
entity_registry = er.async_get(hass)
session = async_get_clientsession(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -96,6 +103,15 @@ async def async_setup_entry(
device.id, remove_config_entry_id=config_entry.entry_id
)
notify_entities = [
entry.entity_id
for entry in entity_registry.entities.values()
if entry.domain == NOTIFY_DOMAIN
and entry.config_entry_id == config_entry.entry_id
]
for entity_id in notify_entities:
entity_registry.async_remove(entity_id)
hass.config_entries.async_schedule_reload(config_entry.entry_id)
coordinator.async_add_listener(_party_update_listener)

View File

@@ -121,4 +121,4 @@ class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
@property
def is_on(self) -> bool | None:
"""If the binary sensor is on."""
return self.coordinator.data.quest.active
return self.coordinator.data.party.quest.active

View File

@@ -9,6 +9,7 @@ from datetime import timedelta
from io import BytesIO
import logging
from typing import Any
from uuid import UUID
from aiohttp import ClientError
from habiticalib import (
@@ -48,6 +49,14 @@ class HabiticaData:
tasks: list[TaskData]
@dataclass
class HabiticaPartyData:
"""Habitica party data."""
party: GroupData
members: dict[UUID, UserData]
type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]
@@ -192,11 +201,19 @@ class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
return png.getvalue()
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[HabiticaPartyData]):
"""Habitica Party Coordinator."""
_update_interval = timedelta(minutes=15)
async def _update_data(self) -> GroupData:
async def _update_data(self) -> HabiticaPartyData:
"""Fetch the latest party data."""
return (await self.habitica.get_group()).data
return HabiticaPartyData(
party=(await self.habitica.get_group()).data,
members={
member.id: member
for member in (await self.habitica.get_group_members()).data
if member.id
},
)

View File

@@ -68,14 +68,14 @@ class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
super().__init__(coordinator)
if TYPE_CHECKING:
assert config_entry.unique_id
unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
unique_id = f"{config_entry.unique_id}_{coordinator.data.party.id!s}"
self.entity_description = entity_description
self._attr_unique_id = f"{unique_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
model=NAME,
name=coordinator.data.summary,
name=coordinator.data.party.summary,
identifiers={(DOMAIN, unique_id)},
via_device=(DOMAIN, config_entry.unique_id),
)

View File

@@ -194,6 +194,11 @@
"quest_running": {
"default": "mdi:script-text-play"
}
},
"notify": {
"party_chat": {
"default": "mdi:forum"
}
}
},
"services": {

Some files were not shown because too many files have changed in this diff.