Compare commits

...

334 Commits

Author SHA1 Message Date
J. Nick Koston
87a6a029bb Bump habluetooth to 5.7.0 (#153665) 2025-10-04 12:16:51 -05:00
Jan Bouwhuis
1cc3c22d3f Fix MQTT Lock state reset to unknown when a reset payload is received (#153647) 2025-10-04 17:32:18 +02:00
Marc Mueller
2341d1d965 Fix flaky template test (#153624) 2025-10-04 18:28:36 +03:00
J. Nick Koston
a0bae9485c Bump bluetooth-data-tools to 1.28.3 (#153653) 2025-10-04 18:27:09 +03:00
J. Nick Koston
f281b0fc6b Bump annotatedyaml to 1.0.2 (#153651) 2025-10-04 18:26:55 +03:00
Manu
6f89fe81cc Remove Plum Lightpad integration (#153590) 2025-10-04 16:42:28 +02:00
Hessel
34f6ead7a1 Wallbox fix Rate Limit issue for multiple chargers (#153074) 2025-10-04 16:38:11 +02:00
Kevin McCormack
8985527a87 Bump libpyvivotek to 0.6.1 and add strict typing for Vivotek integration (#153342)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-04 16:34:37 +02:00
Marc Mueller
bd87a3aa4d Update PyYAML to 6.0.3 (#153626) 2025-10-04 16:27:22 +02:00
Manu
768a505904 Add translations and icons to OralB integration (#153605) 2025-10-04 16:23:27 +02:00
Marc Mueller
d97c1f0fc3 Update grpcio to 1.75.1 (#153643) 2025-10-04 16:21:16 +02:00
Joakim Plate
c3fcd34d4c Fix blue current mocking out platform with empty string (#153604)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-10-04 15:17:56 +02:00
Joakim Plate
44d9eaea95 Correct kraken test issues (#153601) 2025-10-04 12:25:35 +02:00
G Johansson
0f34f5139a Fix sql repair string (#153619) 2025-10-04 10:39:26 +02:00
Ludovic BOUÉ
2afb1a673d Add Matter Thermostat OccupancySensor (#153166)
Co-authored-by: Björn Ebbinghaus <bjoern@ebbinghaus.me>
Co-authored-by: TheJulianJES <TheJulianJES@users.noreply.github.com>
2025-10-04 10:31:17 +02:00
G Johansson
c2f7f29630 Setup platform services during integration start in sensibo (#153571) 2025-10-04 10:30:01 +02:00
G Johansson
b01f5dd24b Raise repairs on platform setup for sql (#153581) 2025-10-04 07:02:36 +02:00
Luke Lashley
0cda0c449f Update the map parser in Roborock vacuum to use coord parser. (#153520) 2025-10-03 19:59:57 -07:00
dollaransh17
40fdf12bc9 Fix string interpolation in local_todo error messages (#153580)
Co-authored-by: dollaransh17 <dollaransh17@users.noreply.github.com>
2025-10-03 19:57:50 -07:00
Luke Lashley
3939a80302 Switch Roborock to v4 of the code login api (#153593) 2025-10-03 19:41:39 -07:00
Luke Lashley
d32a102613 Add two new consumable sensors to Roborock (#153606) 2025-10-03 19:38:29 -07:00
Felipe Santos
20949d39c4 Address comments for the add-on switch entity (#153518) 2025-10-04 02:22:59 +02:00
Kevin Stillhammer
310a0c8d13 Use SensorDescription for GoogleTravelTimeSensor (#153585) 2025-10-04 01:29:26 +02:00
Arie Catsman
c9e80ac7e9 Extend enphase_envoy test data with new library data fields (#153591) 2025-10-04 01:24:11 +02:00
G Johansson
5df4e9e1cf Bump pynordpool to 0.3.1 (#153599) 2025-10-04 01:23:09 +02:00
Simone Chemelli
4022ee74e8 Bump aioamazondevices to 6.2.8 (#153592) 2025-10-04 01:22:15 +02:00
Erwin Douna
80a4115c44 Portainer follow-up points (#153594) 2025-10-03 22:36:55 +02:00
Manu
ce548efd80 Remove IBM Watson IoT Platform integration (#153567) 2025-10-03 21:18:39 +02:00
dependabot[bot]
2edf622b41 Bump github/codeql-action from 3.30.5 to 3.30.6 (#153524)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-03 21:15:47 +02:00
Manu
66ac9078aa Improve Habitica tests (#153573) 2025-10-03 20:55:38 +02:00
Erwin Douna
ba75f18f5a Portainer add switch platform (#153485)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-03 20:52:37 +02:00
Arie Catsman
8ee2ece03e Bump pyenphase to 2.4.0 (#153583) 2025-10-03 21:14:34 +03:00
Michael
7060ab8c44 Remove Vultr integration (#153560) 2025-10-03 21:12:26 +03:00
Paulus Schoutsen
85d8244b8a When discovering a Z-Wave adapter, always configure add-on in config flow (#153575) 2025-10-03 19:16:51 +02:00
Abílio Costa
3f9421ab08 Debounce updates in Idasen Desk (#153503) 2025-10-03 18:39:14 +02:00
Luke Lashley
2f3fbf00b7 Bump python-roborock to 2.50.2 (#153561) 2025-10-03 09:30:30 -07:00
Paulus Schoutsen
d595ec8a07 Z-Wave to support migrating from USB to socket with same home ID (#153522) 2025-10-03 10:46:12 -04:00
Ståle Storø Hauknes
4ff5462cc4 Bump Airthings BLE to 1.1.1 (#153529) 2025-10-03 15:50:20 +02:00
Shay Levy
404f95b442 Add Shelly support for valve entities (#153348) 2025-10-03 15:04:35 +03:00
cdnninja
89cf784022 Fix VeSync zero fan speed handling (#153493)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-10-03 11:56:02 +02:00
Copilot
02142f352d Fix awair integration AttributeError when update listener accesses runtime_data (#153521)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: balloob <1444314+balloob@users.noreply.github.com>
2025-10-03 05:49:41 -04:00
Stefan Agner
ec3dd7d1e5 Add num open fds sensor to systemmonitor (#152441)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-10-03 10:53:02 +02:00
Ståle Storø Hauknes
7355799030 Fix typo in Airthings BLE config flow (#153512) 2025-10-03 08:20:17 +02:00
Erik Montnemery
982166df3c Remove module recorder.history.modern (#153502) 2025-10-03 01:00:09 +02:00
puddly
c7d3512ad2 Bump universal-silabs-flasher to 0.0.35 (#153500) 2025-10-03 00:18:05 +02:00
Aidan Timson
ada6f7b3fb Update ovoenergy to 3.0.2 (#153488) 2025-10-02 22:44:28 +01:00
Erik Montnemery
78e16495bd Remove runtime support for recorder DB without States.last_reported_ts (#153495) 2025-10-02 23:15:15 +02:00
starkillerOG
12085e6152 Improve Reolink docstrings (#153498) 2025-10-02 23:10:27 +02:00
starkillerOG
6764463689 Use new Reolink rec_enable flag (#153496) 2025-10-02 23:09:43 +02:00
starkillerOG
7055276665 Allign naming of Reolink host switch entities (#153494) 2025-10-02 23:09:17 +02:00
starkillerOG
71b3ebd15a Cleanup reolink update entity migration (#153492) 2025-10-02 22:49:55 +02:00
starkillerOG
b87910e596 Bump reolink-aio to 0.16.1 (#153489) 2025-10-02 22:49:39 +02:00
Erik Montnemery
e19bfd670b Bump recorder live schema migration to schema version 48 (#153404) 2025-10-02 22:47:01 +02:00
dollaransh17
7b3c96e80b Remove deprication code for reolink Hub switches (#153483)
Thank you, good work!
2025-10-02 22:37:08 +02:00
Erik Montnemery
01ff3cf9d9 Start recorder data migration after schema migration (#153471) 2025-10-02 22:21:49 +02:00
Erik Montnemery
d66da0c10d Respect filtering of WS subscribe_entities when there are unserializalizable states (#153262) 2025-10-02 22:20:45 +02:00
Josef Zweck
3491bb1b40 Fix missing parameter pass in onedrive (#153478) 2025-10-02 22:17:56 +02:00
G Johansson
3bf995eb71 Fix next event in workday calendar (#153465) 2025-10-02 22:17:11 +02:00
Joost Lekkerkerker
2169ce1722 Remove state attributes from Firefly 3 (#153285) 2025-10-02 22:14:51 +02:00
Joost Lekkerkerker
275e9485e9 Fix missing powerconsumptionreport in Smartthings (#153438) 2025-10-02 22:08:48 +02:00
Daniel Hjelseth Høyer
95198ae540 Bump pyTibber to 0.32.2 (#153484) 2025-10-02 21:04:32 +02:00
Aidan Timson
aed2d3899d Update OVOEnergy to 3.0.1 (#153476) 2025-10-02 21:04:16 +02:00
Erik Montnemery
4011d62ac7 Improve enable_migrate_event_ids recorder test fixture (#153470)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-10-02 21:03:26 +02:00
Björn Ebbinghaus
d2aa0573de Add relative humidity to matter climate entities (#152554)
OK after talking with Marcel.
2025-10-02 20:44:19 +02:00
Ståle Storø Hauknes
571b2e3ab6 Fix Airthings config flow description (#153452) 2025-10-02 20:38:27 +02:00
peteS-UK
a7f48360b7 Add PARALLEL_UPDATES to Squeezebox switch platform (#153477) 2025-10-02 20:32:37 +02:00
Erik Montnemery
22f2f8680a Improve recorder migration tests dropping indices (#153456) 2025-10-02 20:16:24 +02:00
Aidan Timson
d92004a9e7 Add missing translation for media browser default title (#153430)
Co-authored-by: Erwin Douna <e.douna@gmail.com>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-10-02 19:33:57 +02:00
Norbert Rittel
64875894d6 Fix sentence-casing in user-facing strings of slack (#153427) 2025-10-02 19:27:28 +02:00
Ståle Storø Hauknes
3f7a288526 Add data_description field for Airthings BLE (#153442) 2025-10-02 19:25:59 +02:00
Ståle Storø Hauknes
a2a067a81c Add serial number to the list of discovered devices (#153448) 2025-10-02 19:25:19 +02:00
Erwin Douna
f9f61b8da7 Portainer add configuration URL's (#153466) 2025-10-02 19:22:34 +02:00
Paul Bottein
cd69b82fc9 Add light, security and climate panel (#153261) 2025-10-02 13:06:53 -04:00
Shay Levy
d20631598e Bump aioshelly 13.11.0 (#153458) 2025-10-02 19:44:24 +03:00
puddly
229ebe16f3 Disable baudrate bootloader reset for ZBT-2 (#153443) 2025-10-02 17:36:10 +01:00
G Johansson
a172f67d37 Fix Nord Pool 15 minute interval (#153350) 2025-10-02 18:26:33 +02:00
Joost Lekkerkerker
ee4a1de566 Add translation for turbo fan mode in SmartThings (#153445)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-02 18:45:37 +03:00
epenet
7ab99c028c Add new test fixture for Tuya wk category (#153457) 2025-10-02 17:29:14 +02:00
TheJulianJES
0e1d12b1ae Fix Z-Wave RGB light turn on causing rare ZeroDivisionError (#153422) 2025-10-02 17:26:49 +02:00
Artur Pragacz
e090ddd761 Move entities to the end of devices in analytics payload (#153449) 2025-10-02 16:36:38 +02:00
Stefan Agner
9721ce6877 Update Home Assistant base image to 2025.10.0 (#153441) 2025-10-02 15:17:59 +02:00
MoonDevLT
8dde94f421 Add Lunatone gateway integration (#149182)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-02 12:55:17 +01:00
dollaransh17
f5f6b22af1 Fix spelling error in logbook tests (#153417)
Co-authored-by: dollaransh17 <dollaransh17@users.noreply.github.com>
2025-10-02 12:49:22 +02:00
Tom Matheussen
f8a93b6561 Add Quality Scale to Satel Integra (#153122) 2025-10-02 12:48:34 +02:00
epenet
840a03f048 Add new dehumidifier fixture for Tuya (#153407) 2025-10-02 12:15:28 +02:00
Erwin Douna
85f3b5ce78 Firefly III add re-auth flow (#153303)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-10-02 12:15:10 +02:00
Michael
f4284fec2f Explicit pass in the config entry to coordinator in airtouch4 (#153361)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-10-02 11:54:20 +02:00
Abílio Costa
3a89b3152f Move common Uptime Robot new device check logic to helper (#153094)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-02 10:52:22 +01:00
epenet
a0356328c3 Use walrus and combine conditions in Tuya alarm control panel (#153426) 2025-10-02 11:52:08 +02:00
Michael J. Kidd
4b6f37b1d7 Pushover: Handle empty data section properly (#153397) 2025-10-02 11:48:03 +02:00
johanzander
716705fb5a Adds token authentication and usage of official API for Growatt MIN/TLX inverters (#149783) 2025-10-02 11:40:53 +02:00
J. Nick Koston
d246836480 Bump aiohomekit to 3.2.19 (#153423) 2025-10-02 11:17:17 +02:00
Guido Schmitz
6ee2b82d15 Cleanup sync_callback in devolo Home Control (#153321) 2025-10-02 11:11:19 +02:00
Luke Lashley
73ff8d36a5 Bump python-roborock to 2.49.1 (#153396) 2025-10-02 11:09:44 +02:00
Manu
1397def3b8 Add last check-in sensor to Habitica integration (#153293) 2025-10-02 11:03:21 +02:00
epenet
d443529041 Add more sensors to Tuya weather monitor (#153420) 2025-10-02 10:58:24 +02:00
G Johansson
373bb20f1b Remove deprecated entity feature constants in vacuum (#153364) 2025-10-02 10:46:34 +02:00
Erik Montnemery
3b44cce6dc Improve recorder migration test (#153405) 2025-10-02 10:45:04 +02:00
Joakim Plate
46056fe45b Correct blocking update in ToGrill with lack of notifications (#153387) 2025-10-02 10:44:42 +02:00
epenet
1816c190b2 Add test fixture for new Tuya cjkg category (#153411) 2025-10-02 10:43:54 +02:00
Josef Zweck
00abaee6b3 Increase onedrive upload chunk size (#153406) 2025-10-02 10:43:10 +02:00
Manu
3a301f54e0 Update markdown field description in ntfy integration (#153421) 2025-10-02 10:40:33 +02:00
Denis Shulyaka
762accbd6d Disable thinking for unsupported gemini models (#153415) 2025-10-02 10:38:31 +02:00
Tom Matheussen
e0422d7d34 Fix Satel Integra creating new binary sensors on YAML import (#153419) 2025-10-02 10:37:41 +02:00
Erwin Douna
6ba2057a88 Bump pyportainer 1.0.3 (#153413) 2025-10-02 10:34:11 +02:00
epenet
752969bce5 Add test fixture for new Tuya jsq category (#153412) 2025-10-02 10:33:02 +02:00
Franck Nijhof
efbdfd2954 Merge branch 'master' into dev 2025-10-02 07:06:58 +00:00
Erik Montnemery
bb7a177a5d Improve recorder migration tests (#153388) 2025-10-02 07:45:04 +02:00
Kinachi249
9b56ca8cde Bump PyCync to 0.4.1 (#153401) 2025-10-02 07:11:34 +02:00
starkillerOG
b0a08782e0 Add Roborock mop intensity translations (#153380) 2025-10-01 22:51:26 +02:00
G Johansson
6c9955f220 Remove deprecated constants in camera (#153363) 2025-10-01 22:20:34 +02:00
G Johansson
f56b94c0f9 Remove deprecated constants from media_player (#153366) 2025-10-01 22:20:07 +02:00
G Johansson
3cf035820b Remove deprecated state constants from lock (#153367) 2025-10-01 22:16:52 +02:00
Erik Montnemery
99a796d066 Remove legacy history queries from recorder (#153324) 2025-10-01 22:06:56 +02:00
Erik Montnemery
1cd1b1aba8 Remove to_native method from recorder database schemas (#153334) 2025-10-01 21:25:05 +02:00
Ståle Storø Hauknes
4131c14629 Add parallel updates to airthings_ble (#153315) 2025-10-01 20:14:23 +02:00
Tom
c2acda5796 Bump airOS module for alternative login url (#153317) 2025-10-01 20:11:35 +02:00
Marc Mueller
4806e7e9d9 Update cryptography to 46.0.2 (#153327) 2025-10-01 19:52:57 +02:00
Marc Mueller
76606fd44f Update types packages (#153330) 2025-10-01 19:51:37 +02:00
Andre Lengwenus
2983f1a3b6 Explicitly check for None in raw value processing of modbus (#153352) 2025-10-01 19:48:35 +02:00
Michael
8019779b3a Set config entry to None in ProxmoxVE (#153357) 2025-10-01 19:45:34 +02:00
Marc Mueller
62cdcbf422 Misc typing improvements (#153322) 2025-10-01 19:30:41 +02:00
Marc Mueller
b12a5a36e1 Update bcrpyt to 5.0.0 (#153325) 2025-10-01 20:07:45 +03:00
epenet
e32763e464 Add water heater fixture for Tuya tests (#153336) 2025-10-01 20:02:54 +03:00
Stefan Agner
b85cf3f9d2 Bump aiohasupervisor to 0.3.3 (#153344) 2025-10-01 20:01:53 +03:00
puddly
3777bcc2af Do not reset the adapter twice during ZHA options flow migration (#153345) 2025-10-01 18:22:41 +02:00
Maciej Bieniek
52cde48ff0 Add missing test for Shelly config flow (#153346) 2025-10-01 18:32:57 +03:00
Marc Mueller
bf1da35303 Update pyOpenSSL to 25.3.0 (#153329) 2025-10-01 17:32:08 +02:00
Erwin Douna
c1bf11da34 Bump pyportainer 1.0.2 (#153326) 2025-10-01 17:07:21 +02:00
Erwin Douna
3c20325b37 Bump pyfirefly 0.1.6 (#153335) 2025-10-01 17:06:31 +02:00
Maciej Bieniek
fd8ccb8d8f Improve mac_address_from_name() function to avoid double discovery of Shelly devices (#153343) 2025-10-01 16:49:27 +02:00
Michael Hansen
d76e947021 Bump intents to 2025.10.1 (#153340) 2025-10-01 09:39:08 -05:00
Erik Montnemery
c91ed96543 Use pytest.mark.usefixtures in history tests (#153306) 2025-10-01 15:53:55 +02:00
HarvsG
b164531ba8 Bayesian - add config entry tests (#153316) 2025-10-01 15:46:16 +02:00
Erik Montnemery
7c623a8704 Use pytest.mark.usefixtures in some recorder tests (#153313) 2025-10-01 15:38:51 +02:00
Maciej Bieniek
7ae3340336 Add test for full device snapshot for Shelly Wall Display XL (#153305) 2025-10-01 16:00:15 +03:00
Marc Mueller
653b73c601 Fix device_automation RuntimeWarning in tests (#153319) 2025-10-01 14:26:09 +02:00
Artur Pragacz
7c93d91bae Filter out service type devices in extended analytics (#153271) 2025-10-01 12:38:50 +02:00
Abílio Costa
07da0cfb2b Stop writing to config dir log file on supervised install (#146675)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-10-01 11:11:00 +01:00
Artur Pragacz
b411a11c2c Add analytics platform to esphome (#153311) 2025-10-01 12:08:50 +02:00
epenet
0555b84d05 Add new cover fixture for Tuya (#153310) 2025-10-01 12:01:37 +02:00
TheJulianJES
790bddef63 Improve ZHA multi-pan firmware repair text (#153232) 2025-10-01 11:50:01 +02:00
TheJulianJES
a3089b8aa7 Replace remaining ZHA "radio" strings with "adapter" (#153234) 2025-10-01 11:46:08 +02:00
puddly
77c8426d63 Use hardware bootloader reset methods for firmware config flows (#153277) 2025-10-01 11:43:28 +02:00
TheJulianJES
faf226f6c2 Fix ZHA unable to select "none" flow control (#153235) 2025-10-01 11:42:50 +02:00
HarvsG
06d143b81a Fix Bayesian ConfigFlow templates in 2025.10 (#153289)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-10-01 11:39:23 +02:00
Erik Montnemery
08b6a0a702 Add device class filter to switcher_kis services (#153248) 2025-10-01 12:27:17 +03:00
Bram Kragten
a20d1e3656 Update frontend to 20251001.0 (#153300) 2025-10-01 09:50:30 +02:00
Erwin Douna
36cc3682ca Add Firefly III integration (#147062)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-30 23:34:33 +02:00
Aviad Levy
1b495ecafa Add support for errored torrents in qBittorrent sensor (#153120)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 23:34:15 +02:00
puddly
7d1a0be07e Reduce Connect firmware install times by removing unnecessary firmware probing (#153012) 2025-09-30 22:41:51 +02:00
Geoffrey
327f65c991 Add switch domain to VegeHub integration (#148436)
Co-authored-by: GhoweVege <85890024+GhoweVege@users.noreply.github.com>
2025-09-30 22:38:05 +02:00
Manu
4ac89f6849 Add notify platform to Habitica (#150553) 2025-09-30 22:35:55 +02:00
Nojus
db3b070ed0 Add meteo_lt integration (#152948)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 22:17:36 +02:00
anishsane
6d940f476a Add support for Media player Mute/Unmute intents (#150508) 2025-09-30 14:37:19 -05:00
Erwin Douna
1ca701dda4 Portainer fix CONF_VERIFY_SSL (#153269)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-09-30 21:36:04 +02:00
Joost Lekkerkerker
291c44100c Add Eltako brand (#153276) 2025-09-30 21:29:58 +02:00
Joost Lekkerkerker
c8d676e06b Add Konnected brand (#153280) 2025-09-30 21:27:43 +02:00
Joost Lekkerkerker
4c1ae0eddc Add Level brand (#153279) 2025-09-30 21:21:21 +02:00
Norbert Rittel
39eadc814f Replace "Climate name" with "Climate program" in ecobee action (#153264) 2025-09-30 21:16:37 +02:00
Robert Resch
f7ecad61ba Bump aioecowitt to 2025.9.2 (#153273) 2025-09-30 20:58:34 +02:00
Norbert Rittel
fa4cb54549 Fix sentence-casing in two title strings of roomba (#153281) 2025-09-30 20:51:44 +02:00
Manu
2be33c5e0a Update quality scale of ntfy integration to platinum 🏆️ (#151785) 2025-09-30 20:36:18 +02:00
LG-ThinQ-Integration
904d7e5d5a Add air/water filter state in percent to LG ThinQ (#152150)
Co-authored-by: yunseon.park <yunseon.park@lge.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 20:26:47 +02:00
Pete Sage
dbc4a65d48 Fix Sonos Dialog Select type conversion part II (#152491)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-30 20:25:19 +02:00
Pete Sage
b93f4aabf1 Add tests for Sonos media metadata (#152622) 2025-09-30 20:24:57 +02:00
Joost Lekkerkerker
9eaa40c7a4 Require cloud for Aladdin Connect (#153278)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-30 19:57:24 +02:00
Lucas Mindêllo de Andrade
b308a882fb Add Roomba J9 compatibility to the roomba integration (#145913)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 19:10:22 +02:00
Erik Montnemery
7f63ba2087 Improve saved state of RestoreSensor when using freezegun (#152740) 2025-09-30 18:27:56 +02:00
Erik Montnemery
d7269cfcc6 Use pytest_unordered in additional service helper tests (#153255) 2025-09-30 18:26:32 +02:00
starkillerOG
2850a574f6 Add Reolink floodlight event entities (#152564) 2025-09-30 17:59:12 +02:00
Samuel Xiao
dcb8d4f702 Add support model [relay switch 2pm] for switchbot cloud (#148381) 2025-09-30 17:49:32 +02:00
Samuel Xiao
aeadc0c4b0 Add lock support to Switchbot Cloud (#148310) 2025-09-30 17:48:38 +02:00
Nathan Spencer
683c6b17be Add release url to Litter-Robot 4 update entity (#152504) 2025-09-30 17:47:27 +02:00
Samuel Xiao
69dd5c91b7 Switchbot Cloud: Fix Roller Shade not work issue (#152528) 2025-09-30 17:05:23 +02:00
HarvsG
5cf7dfca8f Pihole better logging of update errors (#152077) 2025-09-30 16:59:03 +02:00
Marc Mueller
62a49d4244 Update pandas to 2.3.3 (#153251) 2025-09-30 16:58:41 +02:00
falconindy
93ee6322f2 snoo: add button entity for calling start_snoo (#151052)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-30 16:57:58 +02:00
Artur Pragacz
914990b58a Add analytics platform to wled (#153258) 2025-09-30 10:39:32 -04:00
Joakim Sørensen
f78bb5adb6 Bump hass-nabucasa from 1.1.2 to 1.2.0 (#153250) 2025-09-30 15:29:04 +02:00
Erik Montnemery
905f5e7289 Add device class filter to entity services (#153247) 2025-09-30 14:28:04 +01:00
Erik Montnemery
ec503618c3 Handle errors in WS manifest/list (#153256) 2025-09-30 15:12:41 +02:00
Erik Montnemery
7a41cbc314 Skip unserializable flows in WS config_entries/flow/subscribe (#153259) 2025-09-30 15:12:19 +02:00
Erik Montnemery
c58ba734e7 Correct target filter in osoenergy services (#153244) 2025-09-30 14:06:14 +02:00
Erik Montnemery
68f63be62f Correct target filter in litterrobot services (#153243) 2025-09-30 14:05:46 +02:00
Erik Montnemery
2aa4ca1351 Correct homekit service definition (#153242) 2025-09-30 14:04:09 +02:00
Imeon-Energy
fbabb27787 Add forecast energy sensor to Imeon inverter integration (#152176)
Co-authored-by: TheBushBoy <theodavid@icloud.com>
2025-09-30 13:35:18 +02:00
Markus Jacobsen
0960d78eb5 Use initial received WebSocket state in Bang & Olufsen (#152432) 2025-09-30 13:34:43 +02:00
andreimoraru
474b40511f Bump yt-dlp to 2025.09.26 (#153252) 2025-09-30 13:19:06 +02:00
Jan-Philipp Benecke
18b80aced3 Record current quality scale of Electricity Maps (#149241) 2025-09-30 11:38:16 +02:00
dependabot[bot]
b964d362b7 Bump docker/login-action from 3.5.0 to 3.6.0 (#153239)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-30 11:14:17 +02:00
G Johansson
3914e41f3c Rename resolver to nameserver in dnsip (#153223) 2025-09-30 10:46:59 +02:00
Erik Montnemery
82bdfcb99b Correct target filter in ecovacs services (#153241) 2025-09-30 10:39:18 +03:00
Marc Mueller
976cea600f Use attribute names for match class (#153191) 2025-09-29 23:12:54 +02:00
Tom
8c8713c3f7 Rework test split for airOS reauthentication flow (#153221) 2025-09-29 22:07:18 +02:00
G Johansson
2359ae6ce7 Bump pysmhi to 1.1.0 (#153222) 2025-09-29 22:04:59 +02:00
Paul Bottein
b570fd35c8 Replace legacy hass icons to mdi icons (#153204) 2025-09-29 20:04:21 +01:00
starkillerOG
9d94e6b3b4 Add Reolink bicycle sensitivity and delay (#153217) 2025-09-29 20:44:13 +02:00
Martin Hjelmare
cfab789823 Add hardware Zigbee flow strategy (#153190) 2025-09-29 20:08:43 +02:00
Erik Montnemery
81917425dc Add test which fails on duplicated statistics units (#153202)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
Co-authored-by: jbouwh <jan@jbsoft.nl>
2025-09-29 20:07:59 +02:00
Jan Bouwhuis
bfb62709d4 Add missing translation strings for added sensor device classes pm4 and reactive energy (#153215) 2025-09-29 19:55:09 +02:00
Joost Lekkerkerker
ca3f2ee782 Mark Konnected as Legacy (#153193) 2025-09-29 18:22:29 +01:00
Ludovic BOUÉ
fc8703a40f Matter DoorLock attributes (#151418)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-09-29 18:20:22 +01:00
c0ffeeca7
80517c7ac1 ZHA: rename radio to adapter (#153206) 2025-09-29 18:17:44 +01:00
Erik Montnemery
2b4b46eaf8 Add async_iterator util (#153194) 2025-09-29 18:54:23 +02:00
Martin Hjelmare
40b9dae608 Improve hardware flow strings (#153034) 2025-09-29 18:29:58 +02:00
Erik Montnemery
5975cd6e09 Revert "Add mg/m³ as a valid UOM for sensor/number Carbon Monoxide device class" (#153196) 2025-09-29 15:43:13 +01:00
RogerSelwyn
258c9ff52b Handle return result from ebusd being "empty" (#153199) 2025-09-29 16:08:42 +02:00
starkillerOG
89c5d498a4 Add Reolink Ai person type, vehicle type and animal type (#153170) 2025-09-29 15:39:29 +02:00
Artur Pragacz
76cb4d123a Filter out empty integration type in extended analytics (#153188) 2025-09-29 15:18:15 +02:00
Erik Montnemery
f0c29c7699 Revert "Add comment on conversion factor for Carbon monoxide on dependency molecular weight" (#153195) 2025-09-29 14:56:42 +02:00
Kyle Worrall
aa4151ced7 Fix for Hue Integration motion aware areas (#153079)
Co-authored-by: Marcel van der Veldt <m.vanderveldt@outlook.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 14:50:36 +02:00
G Johansson
0a6fa978fa Add timeout to dnsip (to handle stale connections) (#153086) 2025-09-29 14:49:38 +02:00
Simone Chemelli
dc02002b9d Bump aioamazondevices to 6.2.7 (#153185) 2025-09-29 14:30:42 +02:00
cdnninja
f071a3f38b Correct vesync water tank lifted key (#153173) 2025-09-29 14:29:25 +02:00
dependabot[bot]
b935231e47 Bump actions/dependency-review-action from 4.7.3 to 4.8.0 (#153180)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:17:20 +02:00
dependabot[bot]
b9f7613567 Bump github/codeql-action from 3.30.4 to 3.30.5 (#153179)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:15:53 +02:00
Maciej Bieniek
1289a031ab Add consumed energy sensor for Shelly pm1 and switch components (#153053) 2025-09-29 13:06:07 +03:00
Andrew Jackson
289546ef6d Bump aiomealie to 0.11.0 adding times to recipes (#153183) 2025-09-29 11:58:40 +02:00
Guido Schmitz
aacff4db5d Rework devolo Home Control config flow tests (#147083)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 09:47:07 +02:00
starkillerOG
f833b56122 Add Reolink siren state (#153169) 2025-09-29 08:42:38 +02:00
Tom Matheussen
7eb0f2993f Fix entities not being created when adding subentries for Satel Integra (#153139) 2025-09-28 21:37:35 -04:00
Michael
abb341abfe Add newly added cpu temperatures to diagnostics in FRITZ!Tools (#153168) 2025-09-28 22:40:10 +02:00
starkillerOG
0d90614369 Bump reolink-aio to 0.16.0 (#153161) 2025-09-28 21:55:39 +02:00
starkillerOG
ec84bebeea Add Reolink AI bicycle detection entity (#153163) 2025-09-28 21:54:59 +02:00
Shay Levy
9176867d6b Add Shelly EV charger sensors (#152722) 2025-09-28 22:45:11 +03:00
Allen Porter
281a137ff5 Add missing translations for Model Context Protocol integration (#153147) 2025-09-28 20:05:15 +02:00
tronikos
d6543480ac Refactor SQL integration (#153135) 2025-09-28 19:03:13 +02:00
Luca Graf
ae6391b866 Ignore gateway device in ViCare integration (#153097) 2025-09-28 16:04:22 +02:00
Joakim Plate
10b56e4258 Ensure togrill detects disconnected devices (#153067) 2025-09-28 15:34:56 +02:00
Erwin Douna
0ff2597957 Portainer add re-auth flow (#153077) 2025-09-28 15:31:50 +02:00
Artur Pragacz
026b28e962 Improve interview logging in Onkyo (#153095) 2025-09-28 15:26:40 +02:00
peteS-UK
9a1e67294a Extend timeout test in test_config_flow for Squeezebox to completion (#153080) 2025-09-28 15:20:47 +02:00
G Johansson
cdb448a5cc Use automatic reload options flow in random (#153103) 2025-09-28 01:02:33 +01:00
G Johansson
ab80e726e2 Use automatic reload options flow in filter (#153104) 2025-09-28 01:02:14 +01:00
G Johansson
2d5d0f67b2 Use automatic reload options flow in history_stats (#153115) 2025-09-28 01:01:33 +01:00
G Johansson
d4100b6096 Use automatic reload options flow in mold_indicator (#153106) 2025-09-28 01:00:48 +01:00
G Johansson
955e854d77 Use automatic reload options flow in utility_meter (#153111) 2025-09-28 01:00:07 +01:00
G Johansson
0c37f88c49 Use automatic reload options flow in derivative (#153112) 2025-09-28 00:59:07 +01:00
G Johansson
48167eeb9c Use automatic reload options flow in worldclock (#153105) 2025-09-28 00:58:20 +01:00
G Johansson
24177197f7 Use automatic reload options flow in generic_thermostat (#153108) 2025-09-28 00:57:12 +01:00
G Johansson
863fc0ba97 Use automatic reload options flow in switch_as_x (#153109) 2025-09-28 00:52:26 +01:00
G Johansson
9f7b229d02 Use automatic reload options flow in template (#153110) 2025-09-28 00:50:00 +01:00
G Johansson
ffd909f3d9 Use automatic reload options flow in group (#153116) 2025-09-28 00:48:44 +01:00
Tom
1ebf096a33 Add reauthentication flow to airOS (#153076)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-09-27 23:28:14 +02:00
Robert Resch
96d51965e5 Bump deebot-client to 15.0.0 (#153125) 2025-09-27 23:24:39 +02:00
G Johansson
04b510b020 Fix event range in workday calendar (#153128) 2025-09-27 23:22:39 +02:00
G Johansson
c9a301d50e Use automatic reload options flow in systemmonitor (#153107) 2025-09-27 20:50:14 +02:00
G Johansson
b304bd1a8b Use automatic reload options flow in local_file (#153114) 2025-09-27 20:49:39 +02:00
G Johansson
b99525b231 Use automatic reload options flow in tod (#153113) 2025-09-27 20:45:40 +02:00
G Johansson
634db13990 Use automatic reload options flow in trend (#153117) 2025-09-27 20:44:53 +02:00
peteS-UK
ad51a77989 Extend squeezebox config_flow test to completion (#153000)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-09-27 20:36:38 +02:00
G Johansson
3348a39e8a Use automatic reload options flow in generic_hygrostat (#153102) 2025-09-27 20:33:57 +02:00
Christian McHugh
81c2e356ec Fix: Set EPH climate heating as on only when boiler is actively heating (#152914) 2025-09-27 20:19:57 +02:00
Jan Bouwhuis
de6c3512d2 Add IMAP fetch message part feature (#152845) 2025-09-27 14:49:26 +02:00
G Johansson
36dc1e938a Fix can exclude optional holidays in workday (#153082) 2025-09-27 14:40:29 +02:00
Sören Beye
07a78cf6f7 Squeezebox: Proxy all the thumbnails (#147199)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-27 14:39:15 +02:00
Erwin Douna
eaa673e0c3 Portainer switch terminology to API token (#152958)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-27 14:32:25 +02:00
Simone Chemelli
f2c4ca081f Remove redundant code for Alexa Devices (#153083) 2025-09-27 13:05:07 +02:00
Thomas D
e3d707f0b4 Prevent duplicate entities for Volvo integration (#151779) 2025-09-27 12:29:11 +02:00
Tom
fb93fed2e5 Bump airOS dependency (#153065) 2025-09-27 01:20:51 +02:00
Björn Dalfors
95dfc2f23d Bump nibe dependency to 2.19.0 (#153062) 2025-09-26 23:49:40 +01:00
Franck Nijhof
408df2093a Update Home Assistant base image to 2025.09.3 (#153064) 2025-09-26 23:28:43 +02:00
Eskander Bejaoui
f32bf0cc3e nmap_tracker: Optimize default scan options (#153047) 2025-09-26 22:31:49 +02:00
peteS-UK
dbbe3145b6 Replace patch of entity_registry in test_config_flow for Squeezebox (#153039) 2025-09-26 22:17:47 +02:00
Erik Montnemery
f8bf3ea2ef Correct filter of target selector in motioneye services (#152971) 2025-09-26 22:08:19 +02:00
Bouwe Westerdijk
053bd31d43 Snapshot testing for Plugwise Switch platform (#153030) 2025-09-26 22:07:42 +02:00
DeerMaximum
1aefc3f37a NINA Use better wording for filters (#153050) 2025-09-26 22:05:10 +02:00
Joris Pelgröm
3de955d9ce Use UnitOfTime.DAYS instead of custom unit for LetPot number entity (#153054) 2025-09-26 21:58:17 +02:00
SapuSeven
0ff88fd366 Add None-check for VeSync fan device.state.display_status (#153055) 2025-09-26 21:57:01 +02:00
peteS-UK
eb84020773 Replace platform setup functions with fixtures with autouse in Squeezebox tests (#153057) 2025-09-26 21:49:58 +02:00
Tom
4bbfea3c7c Add SSL options during config_flow for airOS (#150325)
Co-authored-by: Åke Strandberg <ake@strandberg.eu>
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-26 21:38:27 +02:00
Josef Zweck
63d4fb7558 Ensure token validity in lamarzocco (#153058) 2025-09-26 21:36:03 +02:00
Artur Pragacz
953895cd81 Use satellite entity area in the assist pipeline (#153017) 2025-09-26 21:34:45 +02:00
Erwin Douna
a6c3f4efc0 Portainer add ability to skip SSL verification (#152955) 2025-09-26 21:32:49 +02:00
Paul Bottein
11e880d034 Update frontend to 20250926.0 (#153049) 2025-09-26 21:31:47 +02:00
Martin Hjelmare
e4d6bdb398 Fix Thread flow abort on multiple flows (#153048) 2025-09-26 18:48:51 +01:00
Andrew Jackson
6ced1783e3 Add discovery to Mealie (#151773)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-26 18:48:19 +02:00
Paulus Schoutsen
8051f78d10 Push ESPHome discovery to ZJS addon (#153004) 2025-09-26 10:12:56 -04:00
Josef Zweck
b724176b23 Bump pylamarzocco to 2.1.1 (#153027) 2025-09-26 15:46:24 +02:00
Erik Montnemery
fdca16ea92 Fix typing in ObjectSelectorConfig (#153043) 2025-09-26 15:18:18 +02:00
Stefan Agner
f8fd8b432a Update Home Assistant base image to 2025.09.2 (#153035) 2025-09-26 13:03:39 +02:00
lliwog
9148ae70ce Fix EZVIZ devices merging due to empty MAC addr (#152939) (#152981)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:47:11 +02:00
RogerSelwyn
447cb26d28 Protect against last_comms being None (#149366)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:35:04 +02:00
J. Nick Koston
2af36465f6 Bump aioesphomeapi to 41.11.0 (#153014) 2025-09-26 12:31:59 +02:00
dependabot[bot]
d5f7265424 Bump github/codeql-action from 3.30.3 to 3.30.4 (#153015)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-26 12:31:15 +02:00
Simone Chemelli
cc16af7f2d Code optimization for Uptime Robot (#153031) 2025-09-26 12:29:02 +02:00
Retha Runolfsson
7a4d75bc44 Add garage door opener for switchbot integration (#148460) 2025-09-26 12:11:59 +02:00
Bouwe Westerdijk
ec0380fd3b Snapshot testing for Plugwise Sensor platform (#153021) 2025-09-26 11:22:14 +02:00
Stefan Agner
b17cc71dfb Bump to home-assistant/wheels@2025.09.1 (#153025) 2025-09-26 11:04:02 +02:00
Erik Montnemery
89b327ed7b Remove device filter from target selector in bang_olufsen services (#152957) 2025-09-26 09:02:14 +02:00
Simone Chemelli
9bf361a1b8 Fix PIN failure if starting with 0 for Comelit SimpleHome (#152983) 2025-09-26 08:59:03 +02:00
J. Diego Rodríguez Royo
d11c171c75 Bump aiohomeconnect to version 0.20.0 (#153003) 2025-09-26 07:49:38 +02:00
puddly
c523c45d17 Allow ZHA discovery if discovery unique_id conflicts with config entry (#153009)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-26 07:39:00 +02:00
puddly
c1b9c0e1b6 Ignore discovery for existing ZHA entries (#152984) 2025-09-26 07:17:01 +02:00
puddly
487b9ff03e Bump ZHA to 0.0.73 (#153007) 2025-09-25 23:44:25 -04:00
Simone Chemelli
ec62b0cdfb Code optimization for Uptime Robot (#152993) 2025-09-26 00:34:09 +01:00
Brandon Harvey
6d0470064f Rename service to action in ESPHome (#152997) 2025-09-25 14:54:06 -05:00
Simone Chemelli
7450b3fd1a Improve tests for Alexa Devices (#152995) 2025-09-25 21:39:44 +02:00
Noah Husby
5b70910d77 Bump aiorussound to 4.8.2 (#152988) 2025-09-25 20:34:29 +02:00
Abílio Costa
52de5ff5ff Remove deprecated zone and event condition keys (#152986) 2025-09-25 19:23:40 +02:00
J. Nick Koston
c4389a1679 Bump aioesphomeapi to 41.10.0 (#152975)
Co-authored-by: Michael Hansen <mike@rhasspy.org>
2025-09-25 19:21:17 +02:00
Norbert Rittel
35faaa6cae Add missing square brackets to references in fully_kiosk actions (#152987) 2025-09-25 19:19:27 +02:00
Paul Bottein
3c0b13975a Update frontend to 20250925.1 (#152985) 2025-09-25 19:05:12 +02:00
Simone Chemelli
bc88696339 Remove deprecated sensors and update remaning for Alexa Devices (#151230) 2025-09-25 18:59:53 +02:00
Erik Montnemery
8f99c3f64a Remove device filter from target selector in lyric services (#152970) 2025-09-25 18:45:32 +02:00
Erik Montnemery
88016d96d4 Remove device and entity filter from target selector in homeassistant services (#152969) 2025-09-25 17:41:54 +01:00
Erik Montnemery
47df73b18f Remove device filter from target selector in google_mail services (#152968) 2025-09-25 18:32:12 +02:00
Maciej Bieniek
1c12d2b8cd Bump accuweather to version 4.2.2 (#152965) 2025-09-25 18:30:47 +02:00
Erik Montnemery
eb38837a8c Replace target selector with device selector in fully_kiosk services (#152959)
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-25 18:30:05 +02:00
Erik Montnemery
159c7fbfd1 Correct filter of target selector in sonos services (#152972) 2025-09-25 18:29:26 +02:00
Joost Lekkerkerker
7ee31f0884 Bump pySmartThings to 3.3.0 (#152977) 2025-09-25 17:57:30 +02:00
Daniel Potthast
0c5e12571a Update mvglive component (#146479)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-25 17:20:43 +02:00
Luke Lashley
9db973217f Fix incorrect Roborock test (#152980) 2025-09-25 17:18:24 +02:00
Artur Pragacz
cf1a745283 Move condition-specific fields into options (#152635) 2025-09-25 15:55:50 +02:00
peteS-UK
834e3f1963 Add HassKey for hass.data in Squeezebox (#149129) 2025-09-25 14:05:40 +02:00
Joakim Sørensen
3f8f7573c9 Bump hass-nabucasa from 1.1.1 to 1.1.2 (#152950) 2025-09-25 11:34:14 +01:00
Karsten Bade
0ae272f1f6 Add return types and docstring to sonos component (#152946) 2025-09-25 11:34:38 +02:00
Paul Bottein
8774295e2e Update frontend to 20250925.0 (#152945) 2025-09-25 11:33:01 +02:00
Erwin Douna
0c8d2594ef Portainer fix unique entity (#152941)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-25 09:49:22 +02:00
Simone Chemelli
205bd2676b Update IQS to platinum for Alexa Devices (#152905) 2025-09-25 09:45:50 +02:00
dependabot[bot]
25849fd9cc Bump actions/cache from 4.2.4 to 4.3.0 (#152934)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-25 09:43:03 +02:00
Sab44
7d6eac9ff7 Bump librehardwaremonitor-api to version 1.4.0 (#152938) 2025-09-25 09:42:31 +02:00
Luke Lashley
31017ebc98 Fix logical error when user has no Roborock maps (#152752) 2025-09-25 09:39:52 +02:00
Jimmy Zhening Luo
724a7b0ecc Quality: mark installation param doc as done (#152909) 2025-09-25 09:06:13 +02:00
Paulus Schoutsen
91e13d447a Prevent common control calling async methods from thread (#152931)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-09-24 23:09:54 -04:00
J. Nick Koston
7c8ad9d535 Fix ESPHome reauth not being triggered on incorrect password (#152911) 2025-09-24 22:27:40 -04:00
Franck Nijhof
9cd3ab853d Add block Spook < 4.0.0 as breaking Home Assistant (#152930) 2025-09-24 22:18:06 -04:00
Paulus Schoutsen
0b0f8c5829 Remove some more domains from common controls (#152927) 2025-09-24 22:15:29 -04:00
J. Nick Koston
ae7bc7fb1b Bump aioesphomeapi to 41.9.4 (#152923) 2025-09-24 19:16:48 -05:00
Franck Nijhof
09750872b5 Bump version to 2025.11.0dev0 (#152915) 2025-09-24 23:55:32 +02:00
Franck Nijhof
076e51017b Bump to home-assistant/wheels@2025.09.0 (#152920) 2025-09-24 23:12:20 +02:00
Simone Chemelli
95e7b00996 Update IQS to platinum for Comelit SimpleHome (#152906) 2025-09-24 22:03:31 +01:00
J. Nick Koston
ddecf1ac21 Bump aioesphomeapi to 41.9.3 to fix segfault (#152912) 2025-09-24 22:00:45 +01:00
629 changed files with 38835 additions and 12067 deletions


@@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -332,14 +332,14 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}


@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.10"
HA_SHORT_VERSION: "2025.11"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
- uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Dependency review
- uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
+ uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
with:
license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
- uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
- uses: actions/cache/restore@v4.2.4
+ uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
- uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true


@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
- uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+ uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
languages: python
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+ uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with:
category: "/language:python"


@@ -203,6 +203,7 @@ homeassistant.components.feedreader.*
homeassistant.components.file_upload.*
homeassistant.components.filesize.*
homeassistant.components.filter.*
+ homeassistant.components.firefly_iii.*
homeassistant.components.fitbit.*
homeassistant.components.flexit_bacnet.*
homeassistant.components.flux_led.*
@@ -325,6 +326,7 @@ homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.lovelace.*
homeassistant.components.luftdaten.*
+ homeassistant.components.lunatone.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mastodon.*
@@ -553,6 +555,7 @@ homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
+ homeassistant.components.vivotek.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.volvo.*

CODEOWNERS (generated)

@@ -492,6 +492,8 @@ build.json @home-assistant/supervisor
/tests/components/filesize/ @gjohansson-ST
/homeassistant/components/filter/ @dgomes
/tests/components/filter/ @dgomes
+ /homeassistant/components/firefly_iii/ @erwindouna
+ /tests/components/firefly_iii/ @erwindouna
/homeassistant/components/fireservicerota/ @cyberjunky
/tests/components/fireservicerota/ @cyberjunky
/homeassistant/components/firmata/ @DaAwesomeP
@@ -908,6 +910,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/luci/ @mzdrale
/homeassistant/components/luftdaten/ @fabaff @frenck
/tests/components/luftdaten/ @fabaff @frenck
+ /homeassistant/components/lunatone/ @MoonDevLT
+ /tests/components/lunatone/ @MoonDevLT
/homeassistant/components/lupusec/ @majuss @suaveolent
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -953,6 +957,8 @@ build.json @home-assistant/supervisor
/tests/components/met_eireann/ @DylanGore
/homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
+ /homeassistant/components/meteo_lt/ @xE1H
+ /tests/components/meteo_lt/ @xE1H
/homeassistant/components/meteoalarm/ @rolfberkenbosch
/homeassistant/components/meteoclimatic/ @adrianmo
/tests/components/meteoclimatic/ @adrianmo
@@ -1190,8 +1196,6 @@ build.json @home-assistant/supervisor
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
- /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
- /tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin


@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
- aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
- armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
- armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
- amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
- i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
+ aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
+ armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
+ armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
+ amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
+ i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io


@@ -616,34 +616,44 @@ async def async_enable_logging(
        ),
    )
    # Log errors to a file if we have write access to file or config dir
+     logger = logging.getLogger()
+     logger.setLevel(logging.INFO if verbose else logging.WARNING)
    if log_file is None:
-         err_log_path = hass.config.path(ERROR_LOG_FILENAME)
+         default_log_path = hass.config.path(ERROR_LOG_FILENAME)
+         if "SUPERVISOR" in os.environ:
+             _LOGGER.info("Running in Supervisor, not logging to file")
+             # Rename the default log file if it exists, since previous versions created
+             # it even on Supervisor
+             if os.path.isfile(default_log_path):
+                 with contextlib.suppress(OSError):
+                     os.rename(default_log_path, f"{default_log_path}.old")
+             err_log_path = None
+         else:
+             err_log_path = default_log_path
    else:
        err_log_path = os.path.abspath(log_file)
-     err_path_exists = os.path.isfile(err_log_path)
-     err_dir = os.path.dirname(err_log_path)
+     if err_log_path:
+         err_path_exists = os.path.isfile(err_log_path)
+         err_dir = os.path.dirname(err_log_path)
-     # Check if we can write to the error log if it exists or that
-     # we can create files in the containing directory if not.
-     if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
-         not err_path_exists and os.access(err_dir, os.W_OK)
-     ):
-         err_handler = await hass.async_add_executor_job(
-             _create_log_file, err_log_path, log_rotate_days
-         )
+         # Check if we can write to the error log if it exists or that
+         # we can create files in the containing directory if not.
+         if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
+             not err_path_exists and os.access(err_dir, os.W_OK)
+         ):
+             err_handler = await hass.async_add_executor_job(
+                 _create_log_file, err_log_path, log_rotate_days
+             )
-         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+             err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+             logger.addHandler(err_handler)
-         logger = logging.getLogger()
-         logger.addHandler(err_handler)
-         logger.setLevel(logging.INFO if verbose else logging.WARNING)
-         # Save the log file location for access by other components.
-         hass.data[DATA_LOGGING] = err_log_path
-     else:
-         _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
+             # Save the log file location for access by other components.
+             hass.data[DATA_LOGGING] = err_log_path
+         else:
+             _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
    async_activate_log_queue_handler(hass)

View File

@@ -1,5 +0,0 @@
{
"domain": "ibm",
"name": "IBM",
"integrations": ["watson_iot", "watson_tts"]
}

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -14,7 +15,7 @@ from airos.exceptions import (
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
@@ -24,6 +25,11 @@ from homeassistant.const import (
)
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOS8
@@ -54,50 +60,107 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
MINOR_VERSION = 2
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOS8
self.errors: dict[str, str] = {}
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
"""Handle the manual input of host and credentials."""
self.errors = {}
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()
validated_info = await self._validate_and_get_device_info(user_input)
if validated_info:
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
title=validated_info["title"],
data=validated_info["data"],
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
)
async def _validate_and_get_device_info(
self, config_data: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate user input with the device API."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
self.errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
self.errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
self.errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"title": airos_data.host.hostname, "data": config_data}
return None
async def async_step_reauth(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm(user_input)
async def async_step_reauth_confirm(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self.errors = {}
if user_input:
validate_data = {**self._get_reauth_entry().data, **user_input}
if await self._validate_and_get_device_info(config_data=validate_data):
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates=validate_data,
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
errors=self.errors,
)

View File

@@ -14,7 +14,7 @@ from airos.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except (AirOSConnectionAuthenticationError,) as err:
except AirOSConnectionAuthenticationError as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.1"]
"requirements": ["airos==0.5.4"]
}

View File

@@ -2,6 +2,14 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
@@ -34,7 +42,9 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
}
},
"entity": {

View File

@@ -23,6 +23,10 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
URL_API_INTEGRATION = {
"url": "https://dashboard.airthings.com/integrations/api-integration"
}
class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Airthings."""
@@ -37,11 +41,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"url": (
"https://dashboard.airthings.com/integrations/api-integration"
),
},
description_placeholders=URL_API_INTEGRATION,
)
errors = {}
@@ -65,5 +65,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title="Airthings", data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
description_placeholders=URL_API_INTEGRATION,
)

View File

@@ -4,9 +4,9 @@
"user": {
"data": {
"id": "ID",
"secret": "Secret",
"description": "Login at {url} to find your credentials"
}
"secret": "Secret"
},
"description": "Log in at {url} to find your credentials"
}
},
"error": {

View File

@@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="no_devices_found")
titles = {
address: discovery.device.name
address: get_name(discovery.device)
for (address, discovery) in self._discovered_devices.items()
}
return self.async_show_form(

View File

@@ -24,5 +24,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"iot_class": "local_polling",
"requirements": ["airthings-ble==0.9.2"]
"requirements": ["airthings-ble==1.1.1"]
}

View File

@@ -114,6 +114,8 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
),
}
PARALLEL_UPDATES = 0
@callback
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:

View File

@@ -6,6 +6,9 @@
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"address": "The Airthings devices discovered via Bluetooth."
}
},
"bluetooth_confirm": {

View File

@@ -2,17 +2,14 @@
from airtouch4pyapi import AirTouch
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .coordinator import AirtouchDataUpdateCoordinator
from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
PLATFORMS = [Platform.CLIMATE]
type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
"""Set up AirTouch4 from a config entry."""
@@ -22,7 +19,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
info = airtouch.GetAcs()
if not info:
raise ConfigEntryNotReady
coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

View File

@@ -2,26 +2,34 @@
import logging
from airtouch4pyapi import AirTouch
from airtouch4pyapi.airtouch import AirTouchStatus
from homeassistant.components.climate import SCAN_INTERVAL
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Airtouch data."""
def __init__(self, hass, airtouch):
def __init__(
self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
) -> None:
"""Initialize global Airtouch data updater."""
self.airtouch = airtouch
super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
"requirements": ["aioamazondevices==6.2.8"]
}

View File

@@ -629,7 +629,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
devices_info.append(
{
"entities": [],
"entry_type": device_entry.entry_type,
"has_configuration_url": device_entry.configuration_url is not None,
"hw_version": device_entry.hw_version,
@@ -638,6 +637,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
"model_id": device_entry.model_id,
"sw_version": device_entry.sw_version,
"via_device": device_entry.via_device_id,
"entities": [],
}
)

View File

@@ -2,9 +2,7 @@
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
from typing import Any
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
@@ -36,7 +34,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy(raw: T) -> T:
def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:

View File
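The change above replaces a module-level constrained TypeVar with Python 3.12's inline type-parameter syntax (PEP 695). A minimal stand-alone sketch of the two spellings, using hypothetical function names rather than anything from this diff:

from typing import Any, TypeVar

# Old style: the constrained TypeVar is declared separately at module level.
T = TypeVar("T", dict[str, Any], list[Any], None)

def old_style(raw: T) -> T:
    """Pass the value through unchanged; only the typing differs."""
    return raw

# New style (Python 3.12+): the same constraint is declared inline on the function.
def new_style[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    """Equivalent signature using PEP 695 type parameters."""
    return raw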

@@ -26,9 +26,6 @@ async def async_setup_entry(
if CONF_HOST in config_entry.data:
coordinator = AwairLocalDataUpdateCoordinator(hass, config_entry, session)
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
else:
coordinator = AwairCloudDataUpdateCoordinator(hass, config_entry, session)
@@ -36,6 +33,11 @@ async def async_setup_entry(
config_entry.runtime_data = coordinator
if CONF_HOST in config_entry.data:
config_entry.async_on_unload(
config_entry.add_update_listener(_async_update_listener)
)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True

View File

@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import frame
from homeassistant.util import slugify
from homeassistant.util.async_iterator import AsyncIteratorReader, AsyncIteratorWriter
from . import util
from .agent import BackupAgent
@@ -144,7 +145,7 @@ class DownloadBackupView(HomeAssistantView):
return Response(status=HTTPStatus.NOT_FOUND)
else:
stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))
reader = cast(IO[bytes], AsyncIteratorReader(hass.loop, stream))
worker_done_event = asyncio.Event()
@@ -152,7 +153,7 @@ class DownloadBackupView(HomeAssistantView):
"""Call by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set)
stream = util.AsyncIteratorWriter(hass)
stream = AsyncIteratorWriter(hass.loop)
worker = threading.Thread(
target=util.decrypt_backup,
args=[backup, reader, stream, password, on_done, 0, []],

View File

@@ -38,6 +38,7 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util, json as json_util
from homeassistant.util.async_iterator import AsyncIteratorReader
from . import util as backup_util
from .agent import (
@@ -72,7 +73,6 @@ from .models import (
)
from .store import BackupStore
from .util import (
AsyncIteratorReader,
DecryptedBackupStreamer,
EncryptedBackupStreamer,
make_backup_dir,
@@ -1525,7 +1525,7 @@ class BackupManager:
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
else:
backup_stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
reader = cast(IO[bytes], AsyncIteratorReader(self.hass.loop, backup_stream))
try:
await self.hass.async_add_executor_job(
validate_password_stream, reader, password

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
from concurrent.futures import CancelledError, Future
import copy
from dataclasses import dataclass, replace
from io import BytesIO
@@ -14,7 +13,7 @@ from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
import threading
from typing import IO, Any, Self, cast
from typing import IO, Any, cast
import aiohttp
from securetar import SecureTarError, SecureTarFile, SecureTarReadError
@@ -23,6 +22,11 @@ from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.async_iterator import (
Abort,
AsyncIteratorReader,
AsyncIteratorWriter,
)
from homeassistant.util.json import JsonObjectType, json_loads_object
from .const import BUF_SIZE, LOGGER
@@ -59,12 +63,6 @@ class BackupEmpty(DecryptError):
_message = "No tar files found in the backup."
class AbortCipher(HomeAssistantError):
"""Abort the cipher operation."""
_message = "Abort cipher operation."
def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True)
@@ -166,106 +164,6 @@ def validate_password(path: Path, password: str | None) -> bool:
return False
class AsyncIteratorReader:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._next_future: Future[bytes | None] | None = None
self._pos: int = 0
async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)
def abort(self) -> None:
"""Abort the reader."""
self._aborted = True
if self._next_future is not None:
self._next_future.cancel()
def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._next_future = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
)
if self._aborted:
self._next_future.cancel()
raise AbortCipher
try:
self._buffer = self._next_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)
def close(self) -> None:
"""Close the iterator."""
class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._aborted = False
self._hass = hass
self._pos: int = 0
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)
self._write_future: Future[bytes | None] | None = None
def __aiter__(self) -> Self:
"""Return the iterator."""
return self
async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration
def abort(self) -> None:
"""Abort the writer."""
self._aborted = True
if self._write_future is not None:
self._write_future.cancel()
def tell(self) -> int:
"""Return the current position in the iterator."""
return self._pos
def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
self._write_future = asyncio.run_coroutine_threadsafe(
self._queue.put(s), self._hass.loop
)
if self._aborted:
self._write_future.cancel()
raise AbortCipher
try:
self._write_future.result()
except CancelledError as err:
raise AbortCipher from err
self._pos += len(s)
return len(s)
def validate_password_stream(
input_stream: IO[bytes],
password: str | None,
@@ -342,7 +240,7 @@ def decrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except AbortCipher:
except Abort:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -430,7 +328,7 @@ def encrypt_backup(
finally:
# Write an empty chunk to signal the end of the stream
output_stream.write(b"")
except AbortCipher:
except Abort:
LOGGER.debug("Cipher operation aborted")
finally:
on_done(error)
@@ -557,8 +455,8 @@ class _CipherBackupStreamer:
self._hass.loop.call_soon_threadsafe(worker_status.done.set)
stream = await self._open_stream()
reader = AsyncIteratorReader(self._hass, stream)
writer = AsyncIteratorWriter(self._hass)
reader = AsyncIteratorReader(self._hass.loop, stream)
writer = AsyncIteratorWriter(self._hass.loop)
worker = threading.Thread(
target=self._cipher_func,
args=[

View File

@@ -73,11 +73,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
# Add the websocket and API client
entry.runtime_data = BangOlufsenData(websocket, client)
# Start WebSocket connection
await client.connect_notifications(remote_control=True, reconnect=True)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
# Start WebSocket connection once the platforms have been loaded.
# This ensures that the initial WebSocket notifications are dispatched to entities
await client.connect_notifications(remote_control=True, reconnect=True)
return True

View File

@@ -125,7 +125,8 @@ async def async_setup_entry(
async_add_entities(
new_entities=[
BangOlufsenMediaPlayer(config_entry, config_entry.runtime_data.client)
]
],
update_before_add=True,
)
# Register actions.
@@ -266,34 +267,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._software_status.software_version,
)
# Get overall device state once. This is handled by WebSocket events the rest of the time.
product_state = await self._client.get_product_state()
# Get volume information.
if product_state.volume:
self._volume = product_state.volume
# Get all playback information.
# Ensure that the metadata is not None upon startup
if product_state.playback:
if product_state.playback.metadata:
self._playback_metadata = product_state.playback.metadata
self._remote_leader = product_state.playback.metadata.remote_leader
if product_state.playback.progress:
self._playback_progress = product_state.playback.progress
if product_state.playback.source:
self._source_change = product_state.playback.source
if product_state.playback.state:
self._playback_state = product_state.playback.state
# Set initial state
if self._playback_state.value:
self._state = self._playback_state.value
self._attr_media_position_updated_at = utcnow()
# Get the highest resolution available of the given images.
self._media_image = get_highest_resolution_artwork(self._playback_metadata)
# If the device has been updated with new sources, then the API will fail here.
await self._async_update_sources()

View File

@@ -3,16 +3,12 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -37,8 +33,6 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -71,16 +65,12 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false

View File

@@ -19,8 +19,8 @@
"bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.2",
"bluetooth-data-tools==1.28.3",
"dbus-fast==2.44.3",
"habluetooth==5.6.4"
"habluetooth==5.7.0"
]
}

View File

@@ -315,9 +315,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component))
frontend.async_register_built_in_panel(
hass, "calendar", "calendar", "hass:calendar"
)
frontend.async_register_built_in_panel(hass, "calendar", "calendar", "mdi:calendar")
websocket_api.async_register_command(hass, handle_calendar_event_create)
websocket_api.async_register_command(hass, handle_calendar_event_delete)

View File

@@ -51,12 +51,6 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval
@@ -118,12 +112,6 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"
# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")
class CameraEntityFeature(IntFlag):
"""Supported features of the camera entity."""
@@ -1117,11 +1105,3 @@ async def async_handle_record_service(
duration=service_call.data[CONF_DURATION],
lookback=service_call.data[CONF_LOOKBACK],
)
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

View File

@@ -53,7 +53,6 @@ from .const import (
CONF_ACME_SERVER,
CONF_ALEXA,
CONF_ALIASES,
CONF_CLOUDHOOK_SERVER,
CONF_COGNITO_CLIENT_ID,
CONF_ENTITY_CONFIG,
CONF_FILTER,
@@ -130,7 +129,6 @@ CONFIG_SCHEMA = vol.Schema(
vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
vol.Optional(CONF_ACCOUNTS_SERVER): str,
vol.Optional(CONF_ACME_SERVER): str,
vol.Optional(CONF_CLOUDHOOK_SERVER): str,
vol.Optional(CONF_RELAYER_SERVER): str,
vol.Optional(CONF_REMOTESTATE_SERVER): str,
vol.Optional(CONF_SERVICEHANDLERS_SERVER): str,

View File

@@ -78,7 +78,6 @@ CONF_USER_POOL_ID = "user_pool_id"
CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_CLOUDHOOK_SERVER = "cloudhook_server"
CONF_RELAYER_SERVER = "relayer_server"
CONF_REMOTESTATE_SERVER = "remotestate_server"
CONF_SERVICEHANDLERS_SERVER = "servicehandlers_server"

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.1"],
"requirements": ["hass-nabucasa==1.2.0"],
"single_config_entry": true
}

View File

@@ -0,0 +1,106 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
The integration does not provide any actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: |
Stale docstring and test name: `test_form_home` and reusing result.
Extract `async_setup_entry` into own fixture.
Avoid importing `config_flow` in tests.
Test reauth with errors
config-flow:
status: todo
comment: |
The config flow is missing data descriptions.
Remove URLs from data descriptions; they should be replaced with placeholders.
Make use of Electricity Maps zone keys in country code as dropdown.
Make use of location selector for coordinates.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: todo
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Use `hass.config_entries.async_setup` instead of `assert await async_setup_component(hass, DOMAIN, {})`
`test_sensor` could use `snapshot_platform`
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
discovery:
status: exempt
comment: |
This integration cannot be discovered; it connects to a cloud service.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: |
This integration connects to a single device per configuration entry.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -49,7 +49,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the config component."""
frontend.async_register_built_in_panel(
hass, "config", "config", "hass:cog", require_admin=True
hass, "config", "config", "mdi:cog", require_admin=True
)
for panel in SECTIONS:

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from http import HTTPStatus
import logging
from typing import Any, NoReturn
from aiohttp import web
@@ -23,7 +24,12 @@ from homeassistant.helpers.data_entry_flow import (
FlowManagerResourceView,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.json import json_fragment
from homeassistant.helpers.json import (
JSON_DUMP,
find_paths_unserializable_data,
json_bytes,
json_fragment,
)
from homeassistant.loader import (
Integration,
IntegrationNotFound,
@@ -31,6 +37,9 @@ from homeassistant.loader import (
async_get_integrations,
async_get_loaded_integration,
)
from homeassistant.util.json import format_unserializable_data
_LOGGER = logging.getLogger(__name__)
@callback
@@ -402,18 +411,40 @@ def config_entries_flow_subscribe(
connection.subscriptions[msg["id"]] = hass.config_entries.flow.async_subscribe_flow(
async_on_flow_init_remove
)
connection.send_message(
websocket_api.event_message(
msg["id"],
[
{"type": None, "flow_id": flw["flow_id"], "flow": flw}
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
try:
serialized_flows = [
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
for flw in hass.config_entries.flow.async_progress()
if flw["context"]["source"]
not in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
)
]
except (ValueError, TypeError):
# If we can't serialize, we'll filter out unserializable flows
serialized_flows = []
for flw in hass.config_entries.flow.async_progress():
if flw["context"]["source"] in (
config_entries.SOURCE_RECONFIGURE,
config_entries.SOURCE_USER,
):
continue
try:
serialized_flows.append(
json_bytes({"type": None, "flow_id": flw["flow_id"], "flow": flw})
)
],
except (ValueError, TypeError):
_LOGGER.error(
"Unable to serialize to JSON. Bad data found at %s",
format_unserializable_data(
find_paths_unserializable_data(flw, dump=JSON_DUMP)
),
)
continue
connection.send_message(
websocket_api.messages.construct_event_message(
msg["id"], b"".join((b"[", b",".join(serialized_flows), b"]"))
)
)
connection.send_result(msg["id"])

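The subscription handler above serializes all flows in one pass and, only if that fails, retries per flow so a single unserializable flow is logged and skipped instead of breaking the whole message. A generic sketch of that optimistic-then-per-item pattern (plain json here, not Home Assistant's helpers):

import json
from typing import Any

def serialize_items(items: list[Any]) -> list[str]:
    """Serialize optimistically; on failure, keep only the serializable items."""
    try:
        return [json.dumps(item) for item in items]
    except (TypeError, ValueError):
        good: list[str] = []
        for item in items:
            try:
                good.append(json.dumps(item))
            except (TypeError, ValueError):
                # A real implementation would log which item was skipped.
                continue
        return good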
View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.0"]
"requirements": ["pycync==0.4.1"]
}

View File

@@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SOURCE: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_handle_source_entity_changes(
@@ -46,15 +47,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File

@@ -140,6 +140,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1
MINOR_VERSION = 4

View File

@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

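The validation hook above avoids migrating legacy device conditions by copying the whole validated config into the options key. Roughly, with an illustrative condition dict rather than a real automation:

# Illustrative only: what the new async_validate_complete_config does to the config.
complete_config = {"condition": "device", "device_id": "abc123", "domain": "light"}
complete_config["options"] = complete_config.copy()
# complete_config is now:
# {"condition": "device", "device_id": "abc123", "domain": "light",
#  "options": {"condition": "device", "device_id": "abc123", "domain": "light"}}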
View File

@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
self._attr_translation_key = "button"
self._attr_translation_placeholders = {"key": str(key)}
def _sync(self, message: tuple) -> None:
def sync_callback(self, message: tuple) -> None:
"""Update the binary sensor state."""
if (
message[0] == self._remote_control_property.element_uid

View File

@@ -48,7 +48,6 @@ class DevoloDeviceEntity(Entity):
)
self.subscriber: Subscriber | None = None
self.sync_callback = self._sync
self._value: float
@@ -69,7 +68,7 @@ class DevoloDeviceEntity(Entity):
self._device_instance.uid, self.subscriber
)
def _sync(self, message: tuple) -> None:
def sync_callback(self, message: tuple) -> None:
"""Update the state."""
if message[0] == self._attr_unique_id:
self._value = message[1]

View File

@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
"""
return f"{self._attr_unique_id}_{self._sensor_type}"
def _sync(self, message: tuple) -> None:
def sync_callback(self, message: tuple) -> None:
"""Update the consumption sensor state."""
if message[0] == self._attr_unique_id:
self._value = getattr(

View File

@@ -13,8 +13,3 @@ class Subscriber:
"""Initiate the subscriber."""
self.name = name
self.callback = callback
def update(self, message: str) -> None:
"""Trigger hass to update the device."""
_LOGGER.debug('%s got message "%s"', self.name, message)
self.callback(message)

View File

@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
"""Switch off the device."""
self._binary_switch_property.set(state=False)
def _sync(self, message: tuple) -> None:
def sync_callback(self, message: tuple) -> None:
"""Update the binary switch state and consumption."""
if message[0].startswith("devolo.BinarySwitch"):
self._attr_is_on = self._device_instance.binary_switch_property[

View File

@@ -56,16 +56,16 @@ async def async_setup_entry(
hostname = entry.data[CONF_HOSTNAME]
name = entry.data[CONF_NAME]
resolver_ipv4 = entry.options[CONF_RESOLVER]
resolver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
nameserver_ipv4 = entry.options[CONF_RESOLVER]
nameserver_ipv6 = entry.options[CONF_RESOLVER_IPV6]
port_ipv4 = entry.options[CONF_PORT]
port_ipv6 = entry.options[CONF_PORT_IPV6]
entities = []
if entry.data[CONF_IPV4]:
entities.append(WanIpSensor(name, hostname, resolver_ipv4, False, port_ipv4))
entities.append(WanIpSensor(name, hostname, nameserver_ipv4, False, port_ipv4))
if entry.data[CONF_IPV6]:
entities.append(WanIpSensor(name, hostname, resolver_ipv6, True, port_ipv6))
entities.append(WanIpSensor(name, hostname, nameserver_ipv6, True, port_ipv6))
async_add_entities(entities, update_before_add=True)
@@ -77,11 +77,13 @@ class WanIpSensor(SensorEntity):
_attr_translation_key = "dnsip"
_unrecorded_attributes = frozenset({"resolver", "querytype", "ip_addresses"})
resolver: aiodns.DNSResolver
def __init__(
self,
name: str,
hostname: str,
resolver: str,
nameserver: str,
ipv6: bool,
port: int,
) -> None:
@@ -90,11 +92,11 @@ class WanIpSensor(SensorEntity):
self._attr_unique_id = f"{hostname}_{ipv6}"
self.hostname = hostname
self.port = port
self._resolver = resolver
self.nameserver = nameserver
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
self._retries = DEFAULT_RETRIES
self._attr_extra_state_attributes = {
"resolver": resolver,
"resolver": nameserver,
"querytype": self.querytype,
}
self._attr_device_info = DeviceInfo(
@@ -104,13 +106,13 @@ class WanIpSensor(SensorEntity):
model=aiodns.__version__,
name=name,
)
self.resolver: aiodns.DNSResolver
self.create_dns_resolver()
def create_dns_resolver(self) -> None:
"""Create the DNS resolver."""
self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port)
self.resolver.nameservers = [self._resolver]
self.resolver = aiodns.DNSResolver(
nameservers=[self.nameserver], tcp_port=self.port, udp_port=self.port
)
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""

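The updated sensor now passes the nameserver to the aiodns resolver constructor instead of assigning resolver.nameservers afterwards. A minimal stand-alone sketch of that usage, assuming a reachable public resolver:

import asyncio
import aiodns

async def lookup(hostname: str, nameserver: str, port: int = 53) -> list[str]:
    """Resolve an A record against an explicit nameserver."""
    resolver = aiodns.DNSResolver(
        nameservers=[nameserver], tcp_port=port, udp_port=port
    )
    result = await resolver.query(hostname, "A")
    return [entry.host for entry in result]

print(asyncio.run(lookup("example.com", "1.1.1.1")))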
View File

@@ -2,3 +2,4 @@ raw_get_positions:
target:
entity:
domain: vacuum
integration: ecovacs

View File

@@ -7,7 +7,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.3.0"],
"requirements": ["pyenphase==2.4.0"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@@ -10,7 +10,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Filter from a config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -18,8 +17,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Filter config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -246,6 +246,7 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -0,0 +1,27 @@
"""The Firefly III integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Set up Firefly III from a config entry."""
coordinator = FireflyDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: FireflyConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -0,0 +1,140 @@
"""Config flow for the Firefly III integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_URL): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
vol.Required(CONF_API_KEY): str,
}
)
async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect."""
try:
client = Firefly(
api_url=data[CONF_URL],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await client.get_about()
except FireflyAuthenticationError:
raise InvalidAuth from None
except FireflyConnectionError as err:
raise CannotConnect from err
except FireflyTimeoutError as err:
raise FireflyClientTimeout from err
return True
class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Firefly III."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_create_entry(
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth when Firefly III API authentication fails."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth: ask for a new API key and validate."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
try:
await _validate_input(
self.hass,
data={
**reauth_entry.data,
CONF_API_KEY: user_input[CONF_API_KEY],
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class FireflyClientTimeout(HomeAssistantError):
"""Error to indicate a timeout occurred."""

View File

@@ -0,0 +1,6 @@
"""Constants for the Firefly III integration."""
DOMAIN = "firefly_iii"
MANUFACTURER = "Firefly III"
NAME = "Firefly III"

View File

@@ -0,0 +1,137 @@
"""Data Update Coordinator for Firefly III integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from aiohttp import CookieJar
from pyfirefly import (
Firefly,
FireflyAuthenticationError,
FireflyConnectionError,
FireflyTimeoutError,
)
from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type FireflyConfigEntry = ConfigEntry[FireflyDataUpdateCoordinator]
DEFAULT_SCAN_INTERVAL = timedelta(minutes=5)
@dataclass
class FireflyCoordinatorData:
"""Data structure for Firefly III coordinator data."""
accounts: list[Account]
categories: list[Category]
category_details: list[Category]
budgets: list[Budget]
bills: list[Bill]
primary_currency: Currency
class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]):
"""Coordinator to manage data updates for Firefly III integration."""
config_entry: FireflyConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: FireflyConfigEntry) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.firefly = Firefly(
api_url=self.config_entry.data[CONF_URL],
api_key=self.config_entry.data[CONF_API_KEY],
session=async_create_clientsession(
self.hass,
self.config_entry.data[CONF_VERIFY_SSL],
cookie_jar=CookieJar(unsafe=True),
),
)
async def _async_setup(self) -> None:
"""Set up the coordinator."""
try:
await self.firefly.get_about()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> FireflyCoordinatorData:
"""Fetch data from Firefly III API."""
now = datetime.now()
start_date = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
end_date = now
try:
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets()
bills = await self.firefly.get_bills()
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except FireflyConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except FireflyTimeoutError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
return FireflyCoordinatorData(
accounts=accounts,
categories=categories,
category_details=category_details,
budgets=budgets,
bills=bills,
primary_currency=primary_currency,
)

View File

@@ -0,0 +1,40 @@
"""Base entity for Firefly III integration."""
from __future__ import annotations
from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import FireflyDataUpdateCoordinator
class FireflyBaseEntity(CoordinatorEntity[FireflyDataUpdateCoordinator]):
"""Base class for Firefly III entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize a Firefly entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
configuration_url=URL(coordinator.config_entry.data[CONF_URL]),
identifiers={
(
DOMAIN,
f"{coordinator.config_entry.entry_id}_{self.entity_description.key}",
)
},
)

View File

@@ -0,0 +1,18 @@
{
"entity": {
"sensor": {
"account_type": {
"default": "mdi:bank",
"state": {
"expense": "mdi:cash-minus",
"revenue": "mdi:cash-plus",
"asset": "mdi:account-cash",
"liability": "mdi:hand-coin"
}
},
"category": {
"default": "mdi:label"
}
}
}
}

View File

@@ -0,0 +1,10 @@
{
"domain": "firefly_iii",
"name": "Firefly III",
"codeowners": ["@erwindouna"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/firefly_iii",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.6"]
}

View File

@@ -0,0 +1,68 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates:
status: exempt
comment: |
No explicit parallel updates are defined.
reauthentication-flow:
status: todo
comment: |
No reauthentication flow is defined. It will be added in a future iteration.
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -0,0 +1,133 @@
"""Sensor platform for Firefly III integration."""
from __future__ import annotations
from pyfirefly.models import Account, Category
from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.components.sensor.const import SensorDeviceClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FireflyConfigEntry, FireflyDataUpdateCoordinator
from .entity import FireflyBaseEntity
ACCOUNT_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="account_type",
translation_key="account",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
CATEGORY_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="category",
translation_key="category",
device_class=SensorDeviceClass.MONETARY,
state_class=SensorStateClass.TOTAL,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: FireflyConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Firefly III sensor platform."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
FireflyAccountEntity(
coordinator=coordinator,
entity_description=description,
account=account,
)
for account in coordinator.data.accounts
for description in ACCOUNT_SENSORS
]
entities.extend(
FireflyCategoryEntity(
coordinator=coordinator,
entity_description=description,
category=category,
)
for category in coordinator.data.category_details
for description in CATEGORY_SENSORS
)
async_add_entities(entities)
class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III account."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
account: Account,
) -> None:
"""Initialize Firefly account entity."""
super().__init__(coordinator, entity_description)
self._account = account
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{account.id}"
self._attr_name = account.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
# Account type state doesn't go well with the icons.json. Need to fix it.
if account.attributes.type == "expense":
self._attr_icon = "mdi:cash-minus"
elif account.attributes.type == "asset":
self._attr_icon = "mdi:account-cash"
elif account.attributes.type == "revenue":
self._attr_icon = "mdi:cash-plus"
elif account.attributes.type == "liability":
self._attr_icon = "mdi:hand-coin"
else:
self._attr_icon = "mdi:bank"
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self._account.attributes.current_balance
class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
"""Entity for Firefly III category."""
def __init__(
self,
coordinator: FireflyDataUpdateCoordinator,
entity_description: SensorEntityDescription,
category: Category,
) -> None:
"""Initialize Firefly category entity."""
super().__init__(coordinator, entity_description)
self._category = category
self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{entity_description.key}_{category.id}"
self._attr_name = category.attributes.name
self._attr_native_unit_of_measurement = (
coordinator.data.primary_currency.attributes.code
)
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
spent_items = self._category.attributes.spent or []
earned_items = self._category.attributes.earned or []
spent = sum(float(item.sum) for item in spent_items if item.sum is not None)
earned = sum(float(item.sum) for item in earned_items if item.sum is not None)
if spent == 0 and earned == 0:
return None
return spent + earned

View File

@@ -0,0 +1,49 @@
{
"config": {
"step": {
"user": {
"data": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "[%key:common::config_flow::data::api_key%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"url": "[%key:common::config_flow::data::url%]",
"api_key": "The API key for authenticating with Firefly",
"verify_ssl": "Verify the SSL certificate of the Firefly instance"
},
"description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
},
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "The new API access token for authenticating with Firefly III"
},
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while trying to connect to the Firefly instance: {error}"
},
"invalid_auth": {
"message": "An error occurred while trying to authenticate: {error}"
},
"timeout_connect": {
"message": "A timeout occurred while trying to connect to the Firefly instance: {error}"
}
}
}

View File

@@ -452,6 +452,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.app.router.register_resource(IndexView(repo_path, hass))
async_register_built_in_panel(hass, "light")
async_register_built_in_panel(hass, "security")
async_register_built_in_panel(hass, "climate")
async_register_built_in_panel(hass, "profile")
async_register_built_in_panel(
@@ -459,7 +463,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"developer-tools",
require_admin=True,
sidebar_title="developer_tools",
sidebar_icon="hass:hammer",
sidebar_icon="mdi:hammer",
)
@callback

View File

@@ -1,8 +1,10 @@
load_url:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
url:
example: "https://home-assistant.io"
required: true
@@ -10,10 +12,12 @@ load_url:
text:
set_config:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
key:
example: "motionSensitivity"
required: true
@@ -26,12 +30,14 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk
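For reference, a minimal sketch of an automation action calling the updated service with its new device_id field (the device ID below is a placeholder):

action: fully_kiosk.load_url
data:
  device_id: 1a2b3c4d5e6f7a8b
  url: "https://home-assistant.io"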

View File

@@ -147,6 +147,10 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -157,6 +161,10 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -174,6 +182,10 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -108,6 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidifier,
@@ -140,6 +141,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -148,7 +150,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -186,11 +187,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -96,6 +96,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
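# With options_flow_reloads enabled, the schema config flow reloads the entry when options change, replacing the removed update listener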
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HEATER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the heater, but
@@ -67,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -113,11 +114,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -104,6 +104,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -620,6 +620,13 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
def create_generate_content_config(self) -> GenerateContentConfig:
"""Create the GenerateContentConfig for the LLM."""
options = self.subentry.data
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
thinking_config: ThinkingConfig | None = None
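# Only request thought summaries for Gemini 2.5 models, excluding the TTS and image variants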
if model.startswith("models/gemini-2.5") and not model.endswith(
("tts", "image", "image-preview")
):
thinking_config = ThinkingConfig(include_thoughts=True)
return GenerateContentConfig(
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
@@ -652,7 +659,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
),
),
],
thinking_config=ThinkingConfig(include_thoughts=True),
thinking_config=thinking_config,
)

View File

@@ -1,7 +1,5 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -22,6 +22,7 @@ from google.protobuf import timestamp_pb2
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
@@ -91,6 +92,16 @@ def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
return timestamp
SENSOR_DESCRIPTIONS = [
SensorEntityDescription(
key="duration",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.MINUTES,
)
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
@@ -105,20 +116,20 @@ async def async_setup_entry(
client_options = ClientOptions(api_key=api_key)
client = RoutesAsyncClient(client_options=client_options)
sensor = GoogleTravelTimeSensor(
config_entry, name, api_key, origin, destination, client
)
sensors = [
GoogleTravelTimeSensor(
config_entry, name, api_key, origin, destination, client, sensor_description
)
for sensor_description in SENSOR_DESCRIPTIONS
]
async_add_entities([sensor], False)
async_add_entities(sensors, False)
class GoogleTravelTimeSensor(SensorEntity):
"""Representation of a Google travel time sensor."""
_attr_attribution = ATTRIBUTION
_attr_native_unit_of_measurement = UnitOfTime.MINUTES
_attr_device_class = SensorDeviceClass.DURATION
_attr_state_class = SensorStateClass.MEASUREMENT
def __init__(
self,
@@ -128,8 +139,10 @@ class GoogleTravelTimeSensor(SensorEntity):
origin: str,
destination: str,
client: RoutesAsyncClient,
sensor_description: SensorEntityDescription,
) -> None:
"""Initialize the sensor."""
self.entity_description = sensor_description
self._attr_name = name
self._attr_unique_id = config_entry.entry_id
self._attr_device_info = DeviceInfo(

View File

@@ -141,15 +141,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await hass.config_entries.async_forward_entry_setups(
entry, (entry.options["group_type"],)
)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -329,6 +329,7 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
@callback
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:

View File

@@ -1,14 +1,18 @@
"""The Growatt server PV inverter sensor integration."""
from collections.abc import Mapping
import logging
import growattServer
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from .const import (
AUTH_API_TOKEN,
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_PLANT_ID,
DEFAULT_URL,
@@ -19,36 +23,110 @@ from .const import (
from .coordinator import GrowattConfigEntry, GrowattCoordinator
from .models import GrowattRuntimeData
_LOGGER = logging.getLogger(__name__)
def get_device_list(
def get_device_list_classic(
api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Retrieve the device list for the selected plant."""
plant_id = config[CONF_PLANT_ID]
# Log in to api and fetch first plant if no plant id is defined.
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
raise ConfigEntryError("Username, Password or URL may be incorrect!")
try:
    login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
except Exception as ex:
    raise ConfigEntryError(
        f"Error communicating with Growatt API during login: {ex}"
    ) from ex
if not login_response.get("success"):
msg = login_response.get("msg", "Unknown error")
_LOGGER.debug("Growatt login failed: %s", msg)
if msg == LOGIN_INVALID_AUTH_CODE:
raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
raise ConfigEntryError(f"Growatt login failed: {msg}")
user_id = login_response["user"]["id"]
if plant_id == DEFAULT_PLANT_ID:
plant_info = api.plant_list(user_id)
try:
plant_info = api.plant_list(user_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during plant list: {ex}"
) from ex
if not plant_info or "data" not in plant_info or not plant_info["data"]:
raise ConfigEntryError("No plants found for this account.")
plant_id = plant_info["data"][0]["plantId"]
# Get a list of devices for the specified plant to add sensors for.
devices = api.device_list(plant_id)
try:
devices = api.device_list(plant_id)
except Exception as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during device list: {ex}"
) from ex
return devices, plant_id
def get_device_list_v1(
api, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Device list logic for Open API V1.
Note: Plant selection (including auto-selection if only one plant exists)
is handled in the config flow before this function is called. This function
only fetches devices for the already-selected plant_id.
"""
plant_id = config[CONF_PLANT_ID]
try:
devices_dict = api.device_list(plant_id)
except growattServer.GrowattV1ApiError as e:
raise ConfigEntryError(
f"API error during device list: {e} (Code: {getattr(e, 'error_code', None)}, Message: {getattr(e, 'error_msg', None)})"
) from e
devices = devices_dict.get("devices", [])
# Only MIN devices (type = 7) are supported by the current V1 API implementation
supported_devices = [
{
"deviceSn": device.get("device_sn", ""),
"deviceType": "min",
}
for device in devices
if device.get("type") == 7
]
for device in devices:
if device.get("type") != 7:
_LOGGER.warning(
"Device %s with type %s not supported in Open API V1, skipping",
device.get("device_sn", ""),
device.get("type"),
)
return supported_devices, plant_id
def get_device_list(
api, config: Mapping[str, str], api_version: str
) -> tuple[list[dict[str, str]], str]:
"""Dispatch to correct device list logic based on API version."""
if api_version == "v1":
return get_device_list_v1(api, config)
if api_version == "classic":
return get_device_list_classic(api, config)
raise ConfigEntryError(f"Unknown API version: {api_version}")
async def async_setup_entry(
hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
"""Set up Growatt from a config entry."""
config = config_entry.data
username = config[CONF_USERNAME]
url = config.get(CONF_URL, DEFAULT_URL)
# If the URL has been deprecated then change to the default instead
@@ -58,11 +136,24 @@ async def async_setup_entry(
new_data[CONF_URL] = url
hass.config_entries.async_update_entry(config_entry, data=new_data)
# Initialise the library with the username & a random id each time it is started
api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
api.server_url = url
# Determine API version
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
api_version = "v1"
token = config[CONF_TOKEN]
api = growattServer.OpenApiV1(token=token)
elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
api_version = "classic"
username = config[CONF_USERNAME]
api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=username
)
api.server_url = url
else:
raise ConfigEntryError("Unknown authentication type in config entry.")
devices, plant_id = await hass.async_add_executor_job(get_device_list, api, config)
devices, plant_id = await hass.async_add_executor_job(
get_device_list, api, config, api_version
)
# Create a coordinator for the total sensors
total_coordinator = GrowattCoordinator(
@@ -75,7 +166,7 @@ async def async_setup_entry(
hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
)
for device in devices
if device["deviceType"] in ["inverter", "tlx", "storage", "mix"]
if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
}
# Perform the first refresh for the total coordinator

View File

@@ -1,22 +1,38 @@
"""Config flow for growatt server integration."""
import logging
from typing import Any
import growattServer
import requests
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.const import (
CONF_NAME,
CONF_PASSWORD,
CONF_TOKEN,
CONF_URL,
CONF_USERNAME,
)
from homeassistant.core import callback
from .const import (
ABORT_NO_PLANTS,
AUTH_API_TOKEN,
AUTH_PASSWORD,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_URL,
DOMAIN,
ERROR_CANNOT_CONNECT,
ERROR_INVALID_AUTH,
LOGIN_INVALID_AUTH_CODE,
SERVER_URLS,
)
_LOGGER = logging.getLogger(__name__)
class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
"""Config flow class."""
@@ -27,12 +43,98 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialise growatt server flow."""
self.user_id = None
self.user_id: str | None = None
self.data: dict[str, Any] = {}
self.auth_type: str | None = None
self.plants: list[dict[str, Any]] = []
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
return self.async_show_menu(
step_id="user",
menu_options=["password_auth", "token_auth"],
)
async def async_step_password_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle username/password authentication."""
if user_input is None:
return self._async_show_password_form()
self.auth_type = AUTH_PASSWORD
# Traditional username/password authentication
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
)
self.api.server_url = user_input[CONF_URL]
try:
login_response = await self.hass.async_add_executor_job(
self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
)
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt API login: %s", ex)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
except (ValueError, KeyError, TypeError, AttributeError) as ex:
_LOGGER.error("Invalid response format during login: %s", ex)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
return self._async_show_password_form({"base": ERROR_INVALID_AUTH})
self.user_id = login_response["user"]["id"]
self.data = user_input
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()
async def async_step_token_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle API token authentication."""
if user_input is None:
return self._async_show_token_form()
self.auth_type = AUTH_API_TOKEN
# Using token authentication
token = user_input[CONF_TOKEN]
self.api = growattServer.OpenApiV1(token=token)
# Verify token by fetching plant list
try:
plant_response = await self.hass.async_add_executor_job(self.api.plant_list)
self.plants = plant_response.get("plants", [])
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt V1 API plant list: %s", ex)
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
except growattServer.GrowattV1ApiError as e:
_LOGGER.error(
"Growatt V1 API error: %s (Code: %s)",
e.error_msg or str(e),
getattr(e, "error_code", None),
)
return self._async_show_token_form({"base": ERROR_INVALID_AUTH})
except (ValueError, KeyError, TypeError, AttributeError) as ex:
_LOGGER.error(
"Invalid response format during Growatt V1 API plant list: %s", ex
)
return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
self.data = user_input
self.data[CONF_AUTH_TYPE] = self.auth_type
return await self.async_step_plant()
@callback
def _async_show_user_form(self, errors=None):
"""Show the form to the user."""
def _async_show_password_form(
self, errors: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show the username/password form to the user."""
data_schema = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
@@ -42,58 +144,87 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self.async_show_form(
step_id="user", data_schema=data_schema, errors=errors
step_id="password_auth", data_schema=data_schema, errors=errors
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@callback
def _async_show_token_form(
self, errors: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the start of the config flow."""
if not user_input:
return self._async_show_user_form()
# Initialise the library with the username & a random id each time it is started
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
)
self.api.server_url = user_input[CONF_URL]
login_response = await self.hass.async_add_executor_job(
self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
"""Show the API token form to the user."""
data_schema = vol.Schema(
{
vol.Required(CONF_TOKEN): str,
}
)
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
return self._async_show_user_form({"base": "invalid_auth"})
self.user_id = login_response["user"]["id"]
self.data = user_input
return await self.async_step_plant()
return self.async_show_form(
step_id="token_auth",
data_schema=data_schema,
errors=errors,
)
async def async_step_plant(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle adding a "plant" to Home Assistant."""
plant_info = await self.hass.async_add_executor_job(
self.api.plant_list, self.user_id
)
if self.auth_type == AUTH_API_TOKEN:
# Using V1 API with token
if not self.plants:
return self.async_abort(reason=ABORT_NO_PLANTS)
if not plant_info["data"]:
return self.async_abort(reason="no_plants")
# Create dictionary of plant_id -> name
plant_dict = {
str(plant["plant_id"]): plant.get("name", "Unknown Plant")
for plant in self.plants
}
plants = {plant["plantId"]: plant["plantName"] for plant in plant_info["data"]}
if user_input is None and len(plant_dict) > 1:
data_schema = vol.Schema(
{vol.Required(CONF_PLANT_ID): vol.In(plant_dict)}
)
return self.async_show_form(step_id="plant", data_schema=data_schema)
if user_input is None and len(plant_info["data"]) > 1:
data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})
if user_input is None:
# Single plant => mark it as selected
user_input = {CONF_PLANT_ID: list(plant_dict.keys())[0]}
return self.async_show_form(step_id="plant", data_schema=data_schema)
user_input[CONF_NAME] = plant_dict[user_input[CONF_PLANT_ID]]
if user_input is None:
# single plant => mark it as selected
user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]}
else:
# Traditional API
try:
plant_info = await self.hass.async_add_executor_job(
self.api.plant_list, self.user_id
)
except requests.exceptions.RequestException as ex:
_LOGGER.error("Network error during Growatt API plant list: %s", ex)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
# Access plant_info["data"] - validate response structure
if not isinstance(plant_info, dict) or "data" not in plant_info:
_LOGGER.error(
"Invalid response format during plant list: missing 'data' key"
)
return self.async_abort(reason=ERROR_CANNOT_CONNECT)
plant_data = plant_info["data"]
if not plant_data:
return self.async_abort(reason=ABORT_NO_PLANTS)
plants = {plant["plantId"]: plant["plantName"] for plant in plant_data}
if user_input is None and len(plant_data) > 1:
data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})
return self.async_show_form(step_id="plant", data_schema=data_schema)
if user_input is None:
# single plant => mark it as selected
user_input = {CONF_PLANT_ID: plant_data[0]["plantId"]}
user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
await self.async_set_unique_id(user_input[CONF_PLANT_ID])
self._abort_if_unique_id_configured()
self.data.update(user_input)

View File

@@ -4,6 +4,16 @@ from homeassistant.const import Platform
CONF_PLANT_ID = "plant_id"
# API key support
CONF_API_KEY = "api_key"
# Auth types for config flow
AUTH_PASSWORD = "password"
AUTH_API_TOKEN = "api_token"
CONF_AUTH_TYPE = "auth_type"
DEFAULT_AUTH_TYPE = AUTH_PASSWORD
DEFAULT_PLANT_ID = "0"
DEFAULT_NAME = "Growatt"
@@ -29,3 +39,10 @@ DOMAIN = "growatt_server"
PLATFORMS = [Platform.SENSOR]
LOGIN_INVALID_AUTH_CODE = "502"
# Config flow error types (also used as abort reasons)
ERROR_CANNOT_CONNECT = "cannot_connect" # Used for both form errors and aborts
ERROR_INVALID_AUTH = "invalid_auth"
# Config flow abort reasons
ABORT_NO_PLANTS = "no_plants"

View File

@@ -1,5 +1,7 @@
"""Coordinator module for managing Growatt data fetching."""
from __future__ import annotations
import datetime
import json
import logging
@@ -38,23 +40,31 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
plant_id: str,
) -> None:
"""Initialize the coordinator."""
self.username = config_entry.data[CONF_USERNAME]
self.password = config_entry.data[CONF_PASSWORD]
self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=self.username
self.api_version = (
"v1" if config_entry.data.get("auth_type") == "api_token" else "classic"
)
# Set server URL
self.api.server_url = self.url
self.device_id = device_id
self.device_type = device_type
self.plant_id = plant_id
# Initialize previous_values to store historical data
self.previous_values: dict[str, Any] = {}
if self.api_version == "v1":
self.username = None
self.password = None
self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
self.token = config_entry.data["token"]
self.api = growattServer.OpenApiV1(token=self.token)
elif self.api_version == "classic":
self.username = config_entry.data.get(CONF_USERNAME)
self.password = config_entry.data[CONF_PASSWORD]
self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
self.api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=self.username
)
self.api.server_url = self.url
else:
raise ValueError(f"Unknown API version: {self.api_version}")
super().__init__(
hass,
_LOGGER,
@@ -67,21 +77,54 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Update data via library synchronously."""
_LOGGER.debug("Updating data for %s (%s)", self.device_id, self.device_type)
# Login in to the Growatt server
self.api.login(self.username, self.password)
# login only required for classic API
if self.api_version == "classic":
self.api.login(self.username, self.password)
if self.device_type == "total":
total_info = self.api.plant_info(self.device_id)
del total_info["deviceList"]
plant_money_text, currency = total_info["plantMoneyText"].split("/")
total_info["plantMoneyText"] = plant_money_text
total_info["currency"] = currency
if self.api_version == "v1":
# The V1 Plant APIs do not provide the same information as the classic plant_info() API
# More specifically:
# 1. There is no monetary information, so today's and lifetime earnings are not available
# 2. There is no nominal power; that value is provided by the inverter min_energy() call instead
# This means, for the total coordinator we can only fetch and map the following:
# todayEnergy -> today_energy
# totalEnergy -> total_energy
# invTodayPpv -> current_power
total_info = self.api.plant_energy_overview(self.plant_id)
total_info["todayEnergy"] = total_info["today_energy"]
total_info["totalEnergy"] = total_info["total_energy"]
total_info["invTodayPpv"] = total_info["current_power"]
else:
# Classic API: use plant_info as before
total_info = self.api.plant_info(self.device_id)
del total_info["deviceList"]
plant_money_text, currency = total_info["plantMoneyText"].split("/")
total_info["plantMoneyText"] = plant_money_text
total_info["currency"] = currency
_LOGGER.debug("Total info for plant %s: %r", self.plant_id, total_info)
self.data = total_info
elif self.device_type == "inverter":
self.data = self.api.inverter_detail(self.device_id)
elif self.device_type == "min":
# Open API V1: min device
try:
min_details = self.api.min_detail(self.device_id)
min_settings = self.api.min_settings(self.device_id)
min_energy = self.api.min_energy(self.device_id)
except growattServer.GrowattV1ApiError as err:
_LOGGER.error(
"Error fetching min device data for %s: %s", self.device_id, err
)
raise UpdateFailed(f"Error fetching min device data: {err}") from err
min_info = {**min_details, **min_settings, **min_energy}
self.data = min_info
_LOGGER.debug("min_info for device %s: %r", self.device_id, min_info)
elif self.device_type == "tlx":
tlx_info = self.api.tlx_detail(self.device_id)
self.data = tlx_info["data"]
_LOGGER.debug("tlx_info for device %s: %r", self.device_id, tlx_info)
elif self.device_type == "storage":
storage_info_detail = self.api.storage_params(self.device_id)
storage_energy_overview = self.api.storage_energy_overview(
@@ -145,7 +188,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return self.data.get("currency")
def get_data(
self, entity_description: "GrowattSensorEntityDescription"
self, entity_description: GrowattSensorEntityDescription
) -> str | int | float | None:
"""Get the data."""
variable = entity_description.api_key

View File

@@ -51,7 +51,7 @@ async def async_setup_entry(
sensor_descriptions: list = []
if device_coordinator.device_type == "inverter":
sensor_descriptions = list(INVERTER_SENSOR_TYPES)
elif device_coordinator.device_type == "tlx":
elif device_coordinator.device_type in ("tlx", "min"):
sensor_descriptions = list(TLX_SENSOR_TYPES)
elif device_coordinator.device_type == "storage":
sensor_descriptions = list(STORAGE_SENSOR_TYPES)

View File

@@ -2,26 +2,42 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_plants": "No plants have been found on this account"
},
"error": {
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
"invalid_auth": "Authentication failed. Please check your credentials and try again.",
"cannot_connect": "Cannot connect to Growatt servers. Please check your internet connection and try again."
},
"step": {
"user": {
"title": "Choose authentication method",
"description": "Note: API Token authentication is currently only supported for MIN/TLX devices. For other device types, please use Username & Password authentication.",
"menu_options": {
"password_auth": "Username & Password",
"token_auth": "API Token (MIN/TLX only)"
}
},
"password_auth": {
"title": "Enter your Growatt login credentials",
"data": {
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]",
"url": "[%key:common::config_flow::data::url%]"
}
},
"token_auth": {
"title": "Enter your API token",
"description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
"data": {
"token": "API Token"
}
},
"plant": {
"data": {
"plant_id": "Plant"
},
"title": "Select your plant"
},
"user": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]",
"url": "[%key:common::config_flow::data::url%]"
},
"title": "Enter your Growatt information"
}
}
},

View File

@@ -4,9 +4,14 @@ from uuid import UUID
from habiticalib import Habitica
from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
entity_registry as er,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
@@ -27,6 +32,7 @@ PLATFORMS = [
Platform.BUTTON,
Platform.CALENDAR,
Platform.IMAGE,
Platform.NOTIFY,
Platform.SENSOR,
Platform.SWITCH,
Platform.TODO,
@@ -46,6 +52,7 @@ async def async_setup_entry(
"""Set up habitica from a config entry."""
party_added_by_this_entry: UUID | None = None
device_reg = dr.async_get(hass)
entity_registry = er.async_get(hass)
session = async_get_clientsession(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -96,6 +103,15 @@ async def async_setup_entry(
device.id, remove_config_entry_id=config_entry.entry_id
)
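# Remove notify entities owned by this config entry; the scheduled reload below recreates the ones that still apply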
notify_entities = [
entry.entity_id
for entry in entity_registry.entities.values()
if entry.domain == NOTIFY_DOMAIN
and entry.config_entry_id == config_entry.entry_id
]
for entity_id in notify_entities:
entity_registry.async_remove(entity_id)
hass.config_entries.async_schedule_reload(config_entry.entry_id)
coordinator.async_add_listener(_party_update_listener)

View File

@@ -121,4 +121,4 @@ class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
@property
def is_on(self) -> bool | None:
"""If the binary sensor is on."""
return self.coordinator.data.quest.active
return self.coordinator.data.party.quest.active

View File

@@ -9,6 +9,7 @@ from datetime import timedelta
from io import BytesIO
import logging
from typing import Any
from uuid import UUID
from aiohttp import ClientError
from habiticalib import (
@@ -48,6 +49,14 @@ class HabiticaData:
tasks: list[TaskData]
@dataclass
class HabiticaPartyData:
"""Habitica party data."""
party: GroupData
members: dict[UUID, UserData]
type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]
@@ -192,11 +201,19 @@ class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
return png.getvalue()
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[HabiticaPartyData]):
"""Habitica Party Coordinator."""
_update_interval = timedelta(minutes=15)
async def _update_data(self) -> GroupData:
async def _update_data(self) -> HabiticaPartyData:
"""Fetch the latest party data."""
return (await self.habitica.get_group()).data
return HabiticaPartyData(
party=(await self.habitica.get_group()).data,
members={
member.id: member
for member in (await self.habitica.get_group_members()).data
if member.id
},
)

View File

@@ -68,14 +68,14 @@ class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
super().__init__(coordinator)
if TYPE_CHECKING:
assert config_entry.unique_id
unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
unique_id = f"{config_entry.unique_id}_{coordinator.data.party.id!s}"
self.entity_description = entity_description
self._attr_unique_id = f"{unique_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
model=NAME,
name=coordinator.data.summary,
name=coordinator.data.party.summary,
identifiers={(DOMAIN, unique_id)},
via_device=(DOMAIN, config_entry.unique_id),
)

View File

@@ -174,6 +174,9 @@
},
"collected_items": {
"default": "mdi:sack"
},
"last_checkin": {
"default": "mdi:login-variant"
}
},
"switch": {
@@ -194,6 +197,11 @@
"quest_running": {
"default": "mdi:script-text-play"
}
},
"notify": {
"party_chat": {
"default": "mdi:forum"
}
}
},
"services": {

View File

@@ -128,7 +128,7 @@ class HabiticaPartyImage(HabiticaPartyBase, ImageEntity):
"""Return URL of image."""
return (
f"{ASSETS_URL}quest_{key}.png"
if (key := self.coordinator.data.quest.key)
if (key := self.coordinator.data.party.quest.key)
else None
)

View File

@@ -0,0 +1,202 @@
"""Notify platform for the Habitica integration."""
from __future__ import annotations
from abc import abstractmethod
from enum import StrEnum
from typing import TYPE_CHECKING
from uuid import UUID
from aiohttp import ClientError
from habiticalib import (
GroupData,
HabiticaException,
NotAuthorizedError,
NotFoundError,
TooManyRequestsError,
UserData,
)
from homeassistant.components.notify import (
DOMAIN as NOTIFY_DOMAIN,
NotifyEntity,
NotifyEntityDescription,
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HABITICA_KEY
from .const import DOMAIN
from .coordinator import HabiticaConfigEntry, HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
PARALLEL_UPDATES = 10
class HabiticaNotify(StrEnum):
"""Habitica Notifier."""
PARTY_CHAT = "party_chat"
PRIVATE_MESSAGE = "private_message"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HabiticaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the notify entity platform."""
members_added: set[UUID] = set()
entity_registry = er.async_get(hass)
coordinator = config_entry.runtime_data
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
async_add_entities(
[HabiticaPartyChatNotifyEntity(coordinator, party_coordinator.data.party)]
)
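# Private message notify entities are added and removed dynamically as party members change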
@callback
def add_entities() -> None:
nonlocal members_added
new_members = set(party_coordinator.data.members.keys()) - members_added
if TYPE_CHECKING:
assert coordinator.data.user.id
new_members.discard(coordinator.data.user.id)
if new_members:
async_add_entities(
HabiticaPrivateMessageNotifyEntity(
coordinator, party_coordinator.data.members[member]
)
for member in new_members
)
members_added |= new_members
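# Drop notify entities for members that have left the party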
delete_members = members_added - set(party_coordinator.data.members.keys())
for member in delete_members:
if entity_id := entity_registry.async_get_entity_id(
NOTIFY_DOMAIN,
DOMAIN,
f"{coordinator.config_entry.unique_id}_{member!s}_{HabiticaNotify.PRIVATE_MESSAGE}",
):
entity_registry.async_remove(entity_id)
members_added.discard(member)
party_coordinator.async_add_listener(add_entities)
add_entities()
class HabiticaBaseNotifyEntity(HabiticaBase, NotifyEntity):
"""Habitica base notify entity."""
def __init__(
self,
coordinator: HabiticaDataUpdateCoordinator,
) -> None:
"""Initialize a Habitica entity."""
super().__init__(coordinator, self.entity_description)
@abstractmethod
async def _send_message(self, message: str) -> None:
"""Send a Habitica message."""
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a message."""
try:
await self._send_message(message)
except NotAuthorizedError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="send_message_forbidden",
translation_placeholders={
**self.translation_placeholders,
"reason": e.error.message,
},
) from e
except NotFoundError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="send_message_not_found",
translation_placeholders={
**self.translation_placeholders,
"reason": e.error.message,
},
) from e
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
translation_placeholders={"retry_after": str(e.retry_after)},
) from e
except HabiticaException as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": e.error.message},
) from e
except ClientError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e)},
) from e
class HabiticaPartyChatNotifyEntity(HabiticaBaseNotifyEntity):
"""Representation of a Habitica party chat notify entity."""
def __init__(
self,
coordinator: HabiticaDataUpdateCoordinator,
party: GroupData,
) -> None:
"""Initialize a Habitica entity."""
self._attr_translation_placeholders = {CONF_NAME: party.name}
self.entity_description = NotifyEntityDescription(
key=HabiticaNotify.PARTY_CHAT,
translation_key=HabiticaNotify.PARTY_CHAT,
)
self.party = party
super().__init__(coordinator)
async def _send_message(self, message: str) -> None:
"""Send a Habitica party chat message."""
await self.coordinator.habitica.send_group_message(
message=message,
group_id=self.party.id,
)
class HabiticaPrivateMessageNotifyEntity(HabiticaBaseNotifyEntity):
"""Representation of a Habitica private message notify entity."""
def __init__(
self,
coordinator: HabiticaDataUpdateCoordinator,
member: UserData,
) -> None:
"""Initialize a Habitica entity."""
self._attr_translation_placeholders = {CONF_NAME: member.profile.name or ""}
self.entity_description = NotifyEntityDescription(
key=f"{member.id!s}_{HabiticaNotify.PRIVATE_MESSAGE}",
translation_key=HabiticaNotify.PRIVATE_MESSAGE,
)
self.member = member
super().__init__(coordinator)
async def _send_message(self, message: str) -> None:
"""Send a Habitica private message."""
if TYPE_CHECKING:
assert self.member.id
await self.coordinator.habitica.send_private_message(
message=message,
to_user_id=self.member.id,
)

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from enum import StrEnum
import logging
from typing import Any
@@ -53,7 +54,7 @@ PARALLEL_UPDATES = 1
class HabiticaSensorEntityDescription(SensorEntityDescription):
"""Habitica Sensor Description."""
value_fn: Callable[[UserData, ContentData], StateType]
value_fn: Callable[[UserData, ContentData], StateType | datetime]
attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
None
)
@@ -114,6 +115,7 @@ class HabiticaSensorEntity(StrEnum):
COLLECTED_ITEMS = "collected_items"
BOSS_RAGE = "boss_rage"
BOSS_RAGE_LIMIT = "boss_rage_limit"
LAST_CHECKIN = "last_checkin"
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
@@ -284,6 +286,16 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
translation_key=HabiticaSensorEntity.PENDING_QUEST_ITEMS,
value_fn=pending_quest_items,
),
HabiticaSensorEntityDescription(
key=HabiticaSensorEntity.LAST_CHECKIN,
translation_key=HabiticaSensorEntity.LAST_CHECKIN,
value_fn=(
lambda user, _: dt_util.as_local(last)
if (last := user.auth.timestamps.loggedin)
else None
),
device_class=SensorDeviceClass.TIMESTAMP,
),
)
@@ -399,7 +411,7 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
entity_description: HabiticaSensorEntityDescription
@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the device."""
return self.entity_description.value_fn(
@@ -442,10 +454,12 @@ class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
entity_description: HabiticaPartySensorEntityDescription
@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return the state of the device."""
return self.entity_description.value_fn(self.coordinator.data, self.content)
return self.entity_description.value_fn(
self.coordinator.data.party, self.content
)
@property
def entity_picture(self) -> str | None:
@@ -453,7 +467,9 @@ class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
pic = self.entity_description.entity_picture
entity_picture = (
pic if isinstance(pic, str) or pic is None else pic(self.coordinator.data)
pic
if isinstance(pic, str) or pic is None
else pic(self.coordinator.data.party)
)
return (
@@ -468,5 +484,5 @@ class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return entity specific state attributes."""
if func := self.entity_description.attributes_fn:
return func(self.coordinator.data, self.content)
return func(self.coordinator.data.party, self.content)
return None

View File

@@ -264,6 +264,14 @@
"name": "[%key:component::habitica::common::quest_name%]"
}
},
"notify": {
"party_chat": {
"name": "Party chat"
},
"private_message": {
"name": "Private message: {name}"
}
},
"sensor": {
"display_name": {
"name": "Display name",
@@ -282,6 +290,9 @@
}
}
},
"last_checkin": {
"name": "Last check-in"
},
"health": {
"name": "Health",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
@@ -572,6 +583,12 @@
},
"frequency_not_monthly": {
"message": "Unable to update task, monthly repeat settings apply only to monthly recurring dailies."
},
"send_message_forbidden": {
"message": "You are not allowed to send messages to {name}. ({reason})"
},
"send_message_not_found": {
"message": "Unable to send message, {name} not found. ({reason})"
}
},
"issues": {

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.3.3b0"],
"requirements": ["aiohasupervisor==0.3.3"],
"single_config_entry": true
}

View File

@@ -73,7 +73,6 @@ class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
try:
await supervisor_client.addons.start_addon(self._addon_slug)
except SupervisorError as err:
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
raise HomeAssistantError(err) from err
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)

View File

@@ -46,7 +46,7 @@ CONFIG_SCHEMA = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the history hooks."""
hass.http.register_view(HistoryPeriodView())
frontend.async_register_built_in_panel(hass, "history", "history", "hass:chart-box")
frontend.async_register_built_in_panel(hass, "history", "history", "mdi:chart-box")
websocket_api.async_setup(hass)
return True

View File

@@ -65,6 +65,7 @@ async def async_setup_entry(
entry,
options={**entry.options, CONF_ENTITY_ID: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
async def source_entity_removed() -> None:
# The source entity has been removed, we remove the config entry because
@@ -86,7 +87,6 @@ async def async_setup_entry(
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -130,8 +130,3 @@ async def async_unload_entry(
) -> bool:
"""Unload History stats config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -162,6 +162,7 @@ class HistoryStatsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.19.0"],
"requirements": ["aiohomeconnect==0.20.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -32,15 +32,12 @@ set_location:
stop:
toggle:
target:
entity: {}
turn_on:
target:
entity: {}
turn_off:
target:
entity: {}
update_entity:
fields:
@@ -53,8 +50,6 @@ update_entity:
reload_custom_templates:
reload_config_entry:
target:
entity: {}
device: {}
fields:
entry_id:
advanced: true

View File

@@ -67,11 +67,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""
context: ConfigFlowContext
# `rts_dtr` targets older adapters, `baudrate` works for newer ones. The reason we
# try them in this order is that on older adapters `baudrate` entered the ESP32-S3
# bootloader instead of the MG24 bootloader.
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None

View File

@@ -157,7 +157,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Connect ZBT-2 firmware update entity."""
bootloader_reset_methods = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
bootloader_reset_methods = [ResetTarget.RTS_DTR]
def __init__(
self,

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.0.34",
"universal-silabs-flasher==0.0.35",
"ha-silabs-firmware-client==0.2.0"
]
}

Some files were not shown because too many files have changed in this diff.