Compare commits

..

172 Commits

Author SHA1 Message Date
Erik
34aff535fd Remove fix for chained steps decorated with progress_step 2025-11-06 12:19:39 +01:00
Erik
3b0f49e2ea Reapply "Fix progress step recursion (#153906)" (#155866)
This reverts commit a4c0a9b3a5.
2025-11-06 12:08:56 +01:00
Artur Pragacz
60014b6530 Rename misspelled service python files (#155909) 2025-11-06 09:59:45 +01:00
Erik Montnemery
3b57cab6b4 Revert "Allow opening sockets in logbook tests" (#155899) 2025-11-06 09:20:28 +01:00
Erik Montnemery
967467664b Disable automatic start of HTTP server in tests (#155857) 2025-11-06 08:37:04 +01:00
alexqzd
b87b5cffd8 SmartThings: Expose the entity to control the AC unit beep (#151546) 2025-11-06 07:55:51 +01:00
Artur Pragacz
bb44987af1 Clear dynamic encryption key in ESPHome on remove (#155858) 2025-11-06 02:11:32 +01:00
Christopher Fenner
8d3ef2b224 Add icons for presets in ViCare ventilation entity (#155845) 2025-11-05 20:57:02 +01:00
wollew
5e409295f9 velux: add one more missing data_description (#155854) 2025-11-05 20:56:19 +01:00
J. Nick Koston
530c189f9c Add Bluetooth WiFi provisioning for Shelly (#155822) 2025-11-05 13:20:24 -06:00
giuseppeg88
f05fef9588 Add bad code attempt event to manual alarm control panel (#146315)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-11-05 18:15:27 +00:00
Paulus Schoutsen
a257b5c54c Rename DALI Center to Sunricher DALI (#155865)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-11-05 19:15:07 +01:00
puddly
5b9f7372fc Allow hardware integrations to specify TX power for ZHA (#155855)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-11-05 19:13:54 +01:00
puddly
a4c0a9b3a5 Revert "Fix progress step recursion (#153906)" (#155866) 2025-11-05 18:46:39 +01:00
Bram Kragten
7d65b4c941 Update frontend to 20251105.0 (#155853) 2025-11-05 16:32:06 +01:00
Martin Hjelmare
abd0ee7bce Fix progress step recursion (#153906) 2025-11-05 15:48:35 +01:00
Will Moss
9e3eb20a04 Fix account link no internet on startup (#154579)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-11-05 15:23:20 +01:00
Erik Montnemery
6dc655c3b4 Allow opening sockets in logbook tests (#155840) 2025-11-05 14:58:21 +01:00
Maciej Bieniek
9f595a94fb Check if the Brother printer serial number matches (#155842) 2025-11-05 14:15:46 +01:00
Lukas
5dc215a143 Bump python-pooldose to 0.7.8 (#155307)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2025-11-05 13:04:49 +00:00
starkillerOG
306b78ba5f Bring Reolink test coverage back to 100% (#155839) 2025-11-05 12:22:44 +01:00
Erik Montnemery
bccb646a07 Create issue to warn against using http.server_host in supervised installs (#155837) 2025-11-05 12:13:56 +01:00
Christopher Fenner
4a5dc8cdd6 Add labels to selector in AndroidTV config flow (#155660) 2025-11-05 12:05:58 +01:00
Erik Montnemery
52a751507a Revert "Deprecate http.server_host option and raise issue if used" (#155834) 2025-11-05 11:26:14 +01:00
wollew
533b9f969d velux: add missing data_descriptions in config flow (#155832) 2025-11-05 11:25:07 +01:00
G Johansson
5de7928bc0 Fix sentence casing in smhi (#155831) 2025-11-05 11:24:52 +01:00
epenet
aad9b07f86 Simplify tuya sensor code (#155835) 2025-11-05 11:24:06 +01:00
Tom Matheussen
3e2c401253 Allow multiple config entries for Satel Integra (#155833) 2025-11-05 11:21:56 +01:00
Bouwe Westerdijk
762e63d042 Bugfix: implement RestoreState and bump backend for Plugwise climate (#155126) 2025-11-05 11:18:15 +01:00
puddly
ec6d40a51c Add progress to ZHA migration steps (#155764)
Co-authored-by: TheJulianJES <TheJulianJES@users.noreply.github.com>
2025-11-05 11:10:10 +01:00
Erik Montnemery
47c2c61626 Deprecate http.server_host option and raise issue if used (#155828) 2025-11-05 11:08:49 +01:00
Erik Montnemery
73c941f6c5 Fix ESPHome config entry unload (#155830) 2025-11-05 10:32:29 +01:00
epenet
685edb5f76 Add Tuya test fixtures for cz category (#155827) 2025-11-05 09:54:27 +01:00
G Johansson
5987b6dcb9 Improve code formatting in System monitor (#155800) 2025-11-04 22:09:04 -08:00
Oliver Gründel
cb029e0bb0 Remove state class for rolling window in ecowitt (#155812) 2025-11-04 22:06:15 -08:00
steinmn
553ec35947 Set LG Thinq energy sensor state_class as total_increasing (#155816) 2025-11-04 22:01:38 -08:00
G Johansson
f93940bfa9 Revert "Make influxdb batch settings configurable" (#155808) 2025-11-04 22:00:02 -08:00
Foscam-wangzhengyu
486f93eb28 Bump libpyfoscamcgi to 0.0.9 (#155824) 2025-11-04 21:58:24 -08:00
cdnninja
462db36fef add update platform to vesync (#154915) 2025-11-04 21:40:35 -08:00
Nathan Spencer
485f7f45e8 Bump pylitterbot to 2025.0.0 (#155821) 2025-11-04 18:03:24 -08:00
G Johansson
a446d8a98c Add fire sensors to smhi (#153224) 2025-11-04 17:37:32 -08:00
J. Nick Koston
b4a31fc578 Bump aioshelly to 13.16.0 (#155813) 2025-11-04 22:20:00 +01:00
G Johansson
22321c22cc Bump holidays to 0.84 (#155802) 2025-11-04 22:18:02 +01:00
TheJulianJES
4419c236e2 Add ZHA migration retry steps for unplugged adapters (#155537) 2025-11-04 20:34:51 +01:00
Maciej Bieniek
1731a2534c Implement base entity class for Brother integration (#155714)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-04 20:28:52 +01:00
Bram Kragten
ec0edf47b1 Update frontend to 20251104.0 (#155799) 2025-11-04 14:08:34 -05:00
Tom Matheussen
57c69738e3 Migrate Satel Integra entities unique_id to use config flow entry_id (#154187) 2025-11-04 20:03:08 +01:00
Robert Resch
fb1f258b2b Readd deprecated archs to build wheels (#155792) 2025-11-04 19:30:19 +01:00
puddly
d419dd0c05 Fix non-unique ZHA serial port paths and migrate USB integration to always list unique paths (#155019) 2025-11-04 11:42:56 -05:00
Paul Bottein
65960aa3f7 Rename safety panel to security panel (#155795) 2025-11-04 17:23:39 +01:00
Marc Mueller
a25afe2834 Fix hassio test RuntimeWarning (#155787) 2025-11-04 17:15:20 +01:00
Marc Mueller
4cdfa3bddb Add mkdocs and sphinx to forbidden packages (#155781) 2025-11-04 17:08:33 +01:00
Erwin Douna
9e7bef9fa7 Bump pyportainer 1.0.13 (#155783) 2025-11-04 16:38:27 +01:00
Marc Mueller
68a1b1f91f Fix hassio tests (#155791) 2025-11-04 16:09:47 +01:00
ekutner
1659ca532d Add retry and error logging if communication with the CoolMaster device fails (#148699) 2025-11-04 14:57:32 +01:00
OzGav
8ea16daae4 Correctly map repeat mode in Music Assistant (#155777) 2025-11-04 14:07:15 +01:00
OzGav
5bd89acf9a Use typed config entry in Music Assistant (#155778) 2025-11-04 14:05:44 +01:00
starkillerOG
2b8db74be4 Bump reolink-aio to 0.16.4 (#155776) 2025-11-04 14:03:44 +01:00
krahabb
d7f9a7114d Deprecate TemperatureConverter.convert_interval (#155689)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Erik Montnemery <erik@montnemery.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-11-04 13:40:41 +01:00
Marc Mueller
f7a59eb86e Sort hassio strings (#155784) 2025-11-04 13:34:57 +01:00
Manu
37eef965ad Add friend count sensor to Xbox integration (#155761) 2025-11-04 11:27:48 +01:00
Amit Finkelstein
b706430e66 Add binary sensor for HassOS share mount status (#149197) 2025-11-04 11:14:10 +01:00
Fredrik Mårtensson
5012aa5cb0 Catch exception from libsoundtouch if device not available (#155749)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-11-04 10:24:38 +01:00
karwosts
1c5f7adf4e Fix Ambient Weather incorrect state classes (#155751) 2025-11-04 09:35:08 +01:00
Manu
ff364e3913 Add support for multiple entries to Xbox integration (#155771) 2025-11-04 09:00:40 +01:00
jgaalen
0e2a4605ff Make influxdb batch settings configurable (#134758)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-11-04 08:33:21 +01:00
cdnninja
ca5b9ce0d3 Correct Vesync Humidifier Mode (#155638) 2025-11-03 22:44:19 -08:00
Brett Adams
953196ec21 Bump Tesla Fleet API to v1.2.5 (#155763) 2025-11-03 22:15:34 -08:00
Kamil Breguła
b5be3d5ac3 Use data_description in config_flow for WLED (#155572)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-03 22:07:15 -08:00
puddly
5d9e8287d3 Bump ZHA to 0.0.77 (#155766) 2025-11-03 21:40:15 -08:00
Matt Zimmerman
dc291708ae Update python-smarttub to 0.0.45 (#155768) 2025-11-03 21:39:54 -08:00
Paulus Schoutsen
257e82fe4e Add multiple selection to media selector (#154350) 2025-11-04 01:44:31 +01:00
starkillerOG
ab6d4d645e Add Reolink audio noise reduction number entity (#155757)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2025-11-03 22:41:56 +00:00
starkillerOG
58ebd84326 Add Reolink exposure mode select entity (#155759) 2025-11-03 23:17:52 +01:00
J. Nick Koston
76b24dafed Bump aioesphomeapi to 42.6.0 (#155728) 2025-11-03 22:04:05 +00:00
Kamil Breguła
431f563ff6 Add translation of exceptions in WLED (#155570)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-11-03 22:59:08 +01:00
starkillerOG
e308e610c6 Add Reolink PIR interval number entity (#155758) 2025-11-03 21:53:07 +00:00
Christopher Fenner
5e77cbd185 Add integration_type to Vicare manifest (#155726) 2025-11-03 22:50:41 +01:00
tronikos
2dbc7ff4b7 Remove Enmax Energy virtual integration (#155475) 2025-11-03 22:48:58 +01:00
Kamil Breguła
49a6c5776d Fix typing of ConfigEntry in WLED (#155571)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-11-03 22:01:42 +01:00
TheJulianJES
98f6001c9c Fix ZBT-2 Thread to Zigbee migration discovery failing (#155735) 2025-11-03 20:02:13 +00:00
Bram Kragten
ce38a93177 Update frontend to 20251103.0 (#155734) 2025-11-03 20:45:08 +01:00
Mike Degatano
92fbf468f2 Disable deprecated addon repair (#155739) 2025-11-03 13:08:30 -05:00
Michael Hansen
e09ec4a6f3 Use character code in language matching (voice) (#155738) 2025-11-03 13:07:38 -05:00
Jan Bouwhuis
db63e0c829 Add RSSI signal strength sensor to incomfort boiler (#155688)
Co-authored-by: Shay Levy <levyshay1@gmail.com>
2025-11-03 18:03:46 +01:00
starkillerOG
8ed88d4a58 Add Reolink restart button for IPC cams (#155710) 2025-11-03 16:57:38 +01:00
dependabot[bot]
d098ada777 Bump github/codeql-action from 4.31.0 to 4.31.2 (#155538)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-03 16:52:53 +01:00
Ілля Піскурьов
1add999c5a Add separate scale and offset for current temperature for modbus climates (#150985)
Co-authored-by: jan iversen <jancasacondor@gmail.com>
Co-authored-by: Claudio Ruggeri - CR-Tech <41435902+crug80@users.noreply.github.com>
Co-authored-by: crug80 <claudio@cr-tech.it>
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-11-03 16:51:41 +01:00
Artur Pragacz
fad217837f Accept more templates in service fields (#150239) 2025-11-03 16:40:42 +01:00
Simone Chemelli
983af1af7b Bump aioamazondevices to 6.5.6 (#155723) 2025-11-03 15:59:39 +01:00
Manu
bcf2c4e9b6 Migrate library xbox-webapi to python-xbox in Xbox integration (#155536) 2025-11-03 13:51:40 +01:00
WardZhou
c72f2fd546 Add Matter CurrentSensitivityLevel for Heiman and Aqara Occupancy/PIR (#155715) 2025-11-03 13:47:12 +01:00
Kamil Breguła
f54864a476 Set PARALLEL_UPDATES for WLED (#155573)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-11-03 12:32:43 +01:00
Christopher Fenner
fe1ff456c6 Add labels to selector in Brother config flow (#155659)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2025-11-03 11:22:00 +01:00
Sander Jochems
ec25ead5ac Add outside temperature sensor to MELCloud Air-to-Air devices (#150722) 2025-11-03 08:52:56 +01:00
nasWebio
e8277cb67c Add alarm control panel platform to NASweb integration (#141582)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-11-03 08:07:53 +01:00
Paulus Schoutsen
da0fb37a20 Fix hassfest brand domain validation (#155701) 2025-11-02 22:58:34 -08:00
Maciej Bieniek
28675eee33 Finish Brother config flow tests by aborting or creating entry (#155663) 2025-11-03 07:50:56 +01:00
Robert Resch
84561cbc41 Use select entity for Ecovacs station auto empty settings (#155679) 2025-11-02 21:38:21 -08:00
Erwin Douna
4e48c881aa Portainer add resource usage of containers (#155113)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-02 21:37:04 -08:00
Joost Lekkerkerker
af8cd0414b Bump python-open-router to 0.3.2 (#155700) 2025-11-02 16:53:50 -08:00
Paulus Schoutsen
f54076da29 Split Yale brand (#155686)
Co-authored-by: J. Nick Koston <nick@koston.org>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-02 19:43:46 -05:00
Matthias Alphart
1d0eb97592 Fix KNX climate loading min/max temp from UI config (#155682) 2025-11-02 16:35:44 -08:00
Robert Resch
57f1c268ef Bump deebot-client to 16.3.0 (#155681) 2025-11-02 16:28:02 -08:00
Jakob Schlyter
01402e4f96 Update regions and voices used by Amazon Polly (#155501) 2025-11-02 16:27:50 -08:00
hanwg
6137a643d8 Fix event entity state update for Telegram bot (#155510) 2025-11-02 16:04:57 -08:00
Michael
1badfe3aff Revert "Remove neato integration (#154902)" (#155685) 2025-11-02 15:58:47 -08:00
starkillerOG
a549104fe1 Bump reolink_aio to 0.16.3 (#155692) 2025-11-02 15:41:08 -08:00
cdnninja
2aab2ddc55 fix vesync mist level value (#155697) 2025-11-02 15:40:01 -08:00
Åke Strandberg
42e01362a5 Bump pymiele dependency to v0.6.0 (#155698) 2025-11-02 15:08:25 -08:00
Ludovic BOUÉ
c3cf24ba25 Add Aqara Presence Multi-Sensor FP300 in Matter tests (#155646) 2025-11-02 20:24:10 +01:00
Ludovic BOUÉ
7809fb6a9b Add Ecovacs Deebot to Matter fixtures (#155587) 2025-11-02 20:23:35 +01:00
David Rapan
144fc2a443 Refactor SQL's data conversion (#155598) 2025-11-02 18:49:18 +01:00
Thomas D
c67e005b2c Use command error message for lock in Volvo integration (#155677) 2025-11-02 18:41:00 +01:00
Maciej Bieniek
1c6913eec2 Add full device tests for new Shelly models (#155669) 2025-11-02 18:26:19 +02:00
Aarni Koskela
fb5c4a1375 Improve Ruuvi Air support (#155678) 2025-11-02 10:16:44 -06:00
Thomas D
60b8392478 Fix device tracker name & icon for Volvo integration (#155667) 2025-11-02 14:57:17 +01:00
Thomas D
7145fb96dd Add lock platform to Volvo integration (#154168) 2025-11-02 14:46:04 +01:00
Maciej Bieniek
37d94aca6d Set PARALLEL_UPDATES to 0 for Brother sensors (#155662) 2025-11-02 13:25:45 +01:00
Christian Kemper
9b697edfca Support for deactivating oneTimeCharge (#155592)
Signed-off-by: Christian Kemper <dev@bestof5.de>
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-11-02 10:50:56 +01:00
Jordan Harvey
22e30be946 Update pynintendoparental to version 1.1.3 (#155568) 2025-11-02 06:28:21 +01:00
Robert Resch
bc9d35b85f Bump deebot-client to 16.2.0 (#155642) 2025-11-01 22:13:29 -07:00
Diogo Gomes
4dfb6e4983 Bump cronsim to 2.7 (#155648) 2025-11-02 00:25:35 +01:00
Kamil Breguła
09d78ab5ad Use data_description in config_flow for GIOS (#155605)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-02 00:16:09 +01:00
Manu
b2ebdb7ef0 Add friend location to Xbox integration (#155645) 2025-11-01 20:18:51 +01:00
cdnninja
83d6a30b2e Add Child Lock Switch to Vesync (#155643) 2025-11-01 20:18:32 +01:00
J. Nick Koston
19dee6d22a Allow configuring ignored Probe Plus devices (#155635) 2025-11-01 11:43:50 -05:00
J. Nick Koston
afd27630fb Allow configuring ignored Kuler Sky devices (#155634) 2025-11-01 11:40:06 -05:00
J. Nick Koston
cad1f1da1d Allow configuring ignored Elk-M1 devices (#155631) 2025-11-01 18:33:48 +02:00
J. Nick Koston
cd62bd86fd Allow configuring ignored Steamist devices (#155630) 2025-11-01 18:33:22 +02:00
J. Nick Koston
79c3bc9eca Allow ignored snooz devices to be set up from the user flow (#155629) 2025-11-01 18:32:59 +02:00
J. Nick Koston
10439eea4b Allow ignored sensorpro devices to be set up from the user flow (#155628) 2025-11-01 18:21:12 +02:00
J. Nick Koston
75cc866e72 Allow ignored sensirion_ble devices to be set up from the user flow (#155626) 2025-11-01 18:15:23 +02:00
J. Nick Koston
8b2ca6c571 Allow ignored ruuvitag_ble devices to be set up from the user flow (#155625) 2025-11-01 18:14:11 +02:00
cdnninja
52db73e8e3 vesync don't assume fan speed target (#155617) 2025-11-01 18:10:08 +02:00
J. Nick Koston
79d15ec91c Allow ignored moat devices to be set up from the user flow (#155624) 2025-11-01 18:07:09 +02:00
J. Nick Koston
5af91df2b9 Allow ignored melnor devices to be set up from the user flow (#155623) 2025-11-01 18:05:32 +02:00
J. Nick Koston
89a85c3d8c Allow ignored medcom_ble devices to be set up from the user flow (#155622) 2025-11-01 18:04:05 +02:00
J. Nick Koston
e44c6391b1 Allow ignored led_ble devices to be set up from the user flow (#155620) 2025-11-01 17:58:47 +02:00
J. Nick Koston
99d3234855 Allow ignored leaone devices to be set up from the user flow (#155619) 2025-11-01 17:56:04 +02:00
J. Nick Koston
32cc5123f5 Allow ignored ld2410_ble devices to be set up from the user flow (#155618) 2025-11-01 17:54:01 +02:00
J. Nick Koston
93415175bb Allow ignored bluemaestro devices to be set up from the user flow (#155613) 2025-11-01 17:53:16 +02:00
J. Nick Koston
f04bb69dbc Allow ignored keymitt_ble devices to be set up from the user flow (#155616) 2025-11-01 17:51:52 +02:00
J. Nick Koston
9f8c9940bd Allow ignored bluemaestro devices to be set up from the user flow (#155611) 2025-11-01 17:51:16 +02:00
J. Nick Koston
496f527dff Allow ignored kegtron devices to be set up from the user flow (#155614) 2025-11-01 17:50:46 +02:00
Kamil Breguła
385e6f58a8 Set PARALLEL_UPDATES in GIOS (#155604)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2025-11-01 16:25:59 +01:00
Manu
c8c37ad628 Remove unused code in Xbox integration (#155575) 2025-11-01 12:20:19 +01:00
David Rapan
cc57732e24 Rename Starlink Last boot time to Last restart (#155596) 2025-11-01 12:19:05 +01:00
David Rapan
6011df8952 Refactor Starlink sensor construction (#155591)
Signed-off-by: David Rapan <david@rapan.cz>
2025-11-01 12:33:29 +02:00
Matthias Alphart
08e494aba5 Update knx-frontend to 2025.10.31.195356 (#155569) 2025-11-01 11:16:25 +02:00
Manu
77c428e4c7 Add @tr4nt0r as code owner to Xbox integration (#155582) 2025-11-01 11:14:37 +02:00
Manu
c22a2b93fa Bump PSNAWP to 3.0.1 (#155579) 2025-10-31 22:23:32 -07:00
Andrew Jackson
7f84363bf4 Transmission create a common base entity (#155213) 2025-10-31 18:16:35 +01:00
Josef Zweck
0980c3a270 Bump onedrive-personal-sdk to 0.0.15 (#155540) 2025-10-31 16:58:31 +00:00
karwosts
7cec3aa27c Hassfest check for invalid localization placeholders (#155216) 2025-10-31 14:43:11 +01:00
Teemu R.
1ddb39f6d0 Use TEMPERATURE_DELTA for tplink temperature offset (#155239)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-10-31 14:19:28 +01:00
Erwin Douna
10d2e38315 Firefly add reconfigure flow (#155530) 2025-10-31 13:42:30 +01:00
Erwin Douna
5299690cb7 Portainer expand reconfigure check (#155544) 2025-10-31 12:05:48 +01:00
Sid
98c1dca7a8 Bump eheimdigital to 1.4.0 (#155539) 2025-10-31 08:59:34 +01:00
cdnninja
54c022d58a Bump pyvesync to 3.1.4 (#155533) 2025-10-31 08:20:02 +01:00
Mike Degatano
77d40ddc7d Addon progress reporting follow-up from feedback (#155464) 2025-10-31 08:17:09 +01:00
Aronne Brivio
092841ca5e Add auto empty sensor to Ecovacs (#155489)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-10-31 02:24:03 +01:00
Aronne Brivio
70238a613d Add border spin switch to Ecovacs (#155512) 2025-10-31 02:18:39 +01:00
Shay Levy
5b8d373527 Fix Shelly irrigation zone ID retrieval with Sleepy devices (#155514) 2025-10-31 01:05:14 +02:00
Andrew Jackson
4e3664b26f Move Transmission services into separate module (#155490)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-31 00:20:37 +02:00
Robert Resch
76f5cc368b Bump uv to 0.9.6 (#155521) 2025-10-30 23:11:45 +01:00
tronikos
2f4cd21a14 Bump opower to 0.15.9 (#155473) 2025-10-30 21:53:41 +00:00
J. Nick Koston
d369aa761a Bump aioesphomeapi to 42.5.0 (#155481) 2025-10-30 21:52:49 +00:00
Erwin Douna
d795806e3d Portainer refactor CONF_VERIFY_SSL (#155520) 2025-10-30 20:27:39 +01:00
Bram Kragten
d45a80ed06 Update frontend to 20251029.1 (#155513) 2025-10-30 19:49:37 +01:00
Erwin Douna
09b46d22af Firefly fix config flow (#155503) 2025-10-30 19:06:04 +01:00
Artur Pragacz
b157afac13 Remove templates from schemas for service fields validation (#150063) 2025-10-30 18:46:43 +01:00
464 changed files with 31027 additions and 2713 deletions

View File

@@ -88,6 +88,10 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
exclude:
- arch: armv7
- arch: armhf
- arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
category: "/language:python"

View File

@@ -362,6 +362,7 @@ homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*
homeassistant.components.network.*

8
CODEOWNERS generated
View File

@@ -1539,8 +1539,8 @@ build.json @home-assistant/supervisor
/tests/components/suez_water/ @ooii @jb101010-2
/homeassistant/components/sun/ @home-assistant/core
/tests/components/sun/ @home-assistant/core
/homeassistant/components/sunricher_dali_center/ @niracler
/tests/components/sunricher_dali_center/ @niracler
/homeassistant/components/sunricher_dali/ @niracler
/tests/components/sunricher_dali/ @niracler
/homeassistant/components/supla/ @mwegrzynek
/homeassistant/components/surepetcare/ @benleb @danielhiversen
/tests/components/surepetcare/ @benleb @danielhiversen
@@ -1817,8 +1817,8 @@ build.json @home-assistant/supervisor
/tests/components/ws66i/ @ssaenger
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm
/tests/components/xbox/ @hunterjm
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
/tests/components/xbox/ @hunterjm @tr4nt0r
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79

2
Dockerfile generated
View File

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.9.5
RUN pip3 install uv==0.9.6
WORKDIR /usr/src

View File

@@ -1,7 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*

View File

@@ -1,11 +1,5 @@
{
"domain": "yale",
"name": "Yale",
"integrations": [
"august",
"yale_smart_alarm",
"yalexs_ble",
"yale_home",
"yale"
]
"name": "Yale (non-US/Canada)",
"integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
}

View File

@@ -0,0 +1,5 @@
{
"domain": "yale_august",
"name": "Yale August (US/Canada)",
"integrations": ["august", "august_ble"]
}

View File

@@ -30,6 +30,7 @@ generate_data:
media:
accept:
- "*"
multiple: true
generate_image:
fields:
task_name:
@@ -57,3 +58,4 @@ generate_image:
media:
accept:
- "*"
multiple: true

View File

@@ -58,7 +58,10 @@ from homeassistant.const import (
from homeassistant.helpers import network
from homeassistant.util import color as color_util, dt as dt_util
from homeassistant.util.decorator import Registry
from homeassistant.util.unit_conversion import TemperatureConverter
from homeassistant.util.unit_conversion import (
TemperatureConverter,
TemperatureDeltaConverter,
)
from .config import AbstractConfig
from .const import (
@@ -844,7 +847,7 @@ def temperature_from_object(
temp -= 273.15
if interval:
return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
return TemperatureConverter.convert(temp, from_unit, to_unit)
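
The hunk above swaps the deprecated TemperatureConverter.convert_interval for TemperatureDeltaConverter.convert when Alexa sends a relative adjustment. A minimal sketch of the difference, using the converters imported above (the numbers are illustrative and not part of the change):

from homeassistant.const import UnitOfTemperature
from homeassistant.util.unit_conversion import (
    TemperatureConverter,
    TemperatureDeltaConverter,
)

# Absolute reading: the scale factor and the 32 °F offset both apply.
TemperatureConverter.convert(
    50, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)  # -> 10.0

# Relative adjustment ("raise by 10 °F"): only the scale factor applies,
# which is what convert_interval used to do and what
# TemperatureDeltaConverter.convert does now.
TemperatureDeltaConverter.convert(
    10, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)  # -> ~5.56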

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.5.5"]
"requirements": ["aioamazondevices==6.5.6"]
}

View File

@@ -106,7 +106,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="daily_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=2,
),
SensorEntityDescription(
@@ -150,7 +150,7 @@ SENSOR_DESCRIPTIONS = (
key=TYPE_LIGHTNING_PER_DAY,
translation_key="lightning_strikes_per_day",
native_unit_of_measurement="strikes",
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
@@ -182,7 +182,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="monthly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
@@ -229,7 +229,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="weekly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
@@ -262,7 +262,7 @@ SENSOR_DESCRIPTIONS = (
translation_key="yearly_rain",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
suggested_display_precision=2,
entity_registry_enabled_default=False,
),
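
All of the rain accumulators in this file move from SensorStateClass.TOTAL to TOTAL_INCREASING. The distinction matters for long-term statistics: with TOTAL_INCREASING a drop in the reported value is treated as the start of a new metering cycle (for example the daily rain counter falling back to 0 at midnight), whereas TOTAL would book that drop as a negative delta unless a last_reset timestamp is supplied. A simplified, self-contained sketch of that accounting (not the recorder's actual implementation):

def total_increasing_sum(readings: list[float]) -> float:
    """Accumulate deltas, treating any decrease as a meter reset."""
    total = 0.0
    previous: float | None = None
    for value in readings:
        if previous is None or value < previous:
            total += value  # first sample or reset: a new cycle starts at `value`
        else:
            total += value - previous
        previous = value
    return total

# Hypothetical daily-rain readings around midnight, in inches:
total_increasing_sum([0.2, 0.5, 0.0, 0.3])  # -> 0.8
# Naively summing the deltas would give 0.1, losing the pre-midnight rainfall.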

View File

@@ -39,11 +39,11 @@ from .const import (
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
DEFAULT_ADB_SERVER_PORT,
DEFAULT_DEVICE_CLASS,
DEFAULT_EXCLUDE_UNNAMED_APPS,
DEFAULT_GET_SOURCES,
DEFAULT_PORT,
DEFAULT_SCREENCAP_INTERVAL,
DEVICE_AUTO,
DEVICE_CLASSES,
DOMAIN,
PROP_ETHMAC,
@@ -89,8 +89,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
data_schema = vol.Schema(
{
vol.Required(CONF_HOST, default=host): str,
vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
DEVICE_CLASSES
vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
SelectSelectorConfig(
options=[
SelectOptionDict(value=k, label=v)
for k, v in DEVICE_CLASSES.items()
],
translation_key="device_class",
)
),
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
},

View File

@@ -15,15 +15,19 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
CONF_TURN_ON_COMMAND = "turn_on_command"
DEFAULT_ADB_SERVER_PORT = 5037
DEFAULT_DEVICE_CLASS = "auto"
DEFAULT_EXCLUDE_UNNAMED_APPS = False
DEFAULT_GET_SOURCES = True
DEFAULT_PORT = 5555
DEFAULT_SCREENCAP_INTERVAL = 5
DEVICE_AUTO = "auto"
DEVICE_ANDROIDTV = "androidtv"
DEVICE_FIRETV = "firetv"
DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
DEVICE_CLASSES = {
DEVICE_AUTO: "auto",
DEVICE_ANDROIDTV: "Android TV",
DEVICE_FIRETV: "Fire TV",
}
PROP_ETHMAC = "ethmac"
PROP_SERIALNO = "serialno"

View File

@@ -65,6 +65,13 @@
}
}
},
"selector": {
"device_class": {
"options": {
"auto": "Auto-detect device type"
}
}
},
"services": {
"adb_command": {
"description": "Sends an ADB command to an Android / Fire TV device.",

View File

@@ -8,6 +8,6 @@
"integration_type": "service",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.6", "securetar==2025.2.1"],
"requirements": ["cronsim==2.7", "securetar==2025.2.1"],
"single_config_entry": true
}

View File

@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:
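
This is the same one-line change behind the long run of "Allow ignored … devices to be set up from the user flow" commits above: ignored config entries keep their unique ID, so filtering discoveries against all current IDs hides an ignored device from the manual flow entirely. A simplified, self-contained sketch of the filter (hypothetical addresses, not taken from the diff):

def selectable_addresses(
    discovered: list[str],
    configured_ids: set[str],
    ignored_ids: set[str],
    include_ignore: bool,
) -> list[str]:
    """Simplified model of the user-step filter above."""
    current = configured_ids | (ignored_ids if include_ignore else set())
    return [address for address in discovered if address not in current]

ignored = {"AA:BB:CC:DD:EE:FF"}
selectable_addresses(["AA:BB:CC:DD:EE:FF"], set(), ignored, include_ignore=True)   # -> []
selectable_addresses(["AA:BB:CC:DD:EE:FF"], set(), ignored, include_ignore=False)  # -> ["AA:BB:CC:DD:EE:FF"]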

View File

@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
from homeassistant.components.snmp import async_get_snmp_engine
from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from .const import (
CONF_COMMUNITY,
@@ -50,6 +50,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
await coordinator.async_config_entry_first_refresh()
if brother.serial.lower() != entry.unique_id:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="serial_mismatch",
translation_placeholders={
"device": entry.title,
},
)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

View File

@@ -13,6 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import section
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.network import is_host_valid
@@ -21,6 +22,7 @@ from .const import (
DEFAULT_COMMUNITY,
DEFAULT_PORT,
DOMAIN,
PRINTER_TYPE_LASER,
PRINTER_TYPES,
SECTION_ADVANCED_SETTINGS,
)
@@ -28,7 +30,12 @@ from .const import (
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
SelectSelectorConfig(
options=PRINTER_TYPES,
translation_key="printer_type",
)
),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
@@ -42,7 +49,12 @@ DATA_SCHEMA = vol.Schema(
)
ZEROCONF_SCHEMA = vol.Schema(
{
vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
SelectSelectorConfig(
options=PRINTER_TYPES,
translation_key="printer_type",
)
),
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{

View File

@@ -7,7 +7,10 @@ from typing import Final
DOMAIN: Final = "brother"
PRINTER_TYPES: Final = ["laser", "ink"]
PRINTER_TYPE_LASER = "laser"
PRINTER_TYPE_INK = "ink"
PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]
UPDATE_INTERVAL = timedelta(seconds=30)

View File

@@ -0,0 +1,30 @@
"""Define the Brother entity."""
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BrotherDataUpdateCoordinator
class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
"""Define a Brother Printer entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: BrotherDataUpdateCoordinator,
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{coordinator.brother.host}/",
identifiers={(DOMAIN, coordinator.brother.serial)},
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)

View File

@@ -19,13 +19,15 @@ from homeassistant.components.sensor import (
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
from .entity import BrotherPrinterEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
ATTR_COUNTER = "counter"
ATTR_REMAINING_PAGES = "remaining_pages"
@@ -330,12 +332,9 @@ async def async_setup_entry(
)
class BrotherPrinterSensor(
CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
):
"""Define an Brother Printer sensor."""
class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
"""Define a Brother Printer sensor."""
_attr_has_entity_name = True
entity_description: BrotherSensorEntityDescription
def __init__(
@@ -345,16 +344,7 @@ class BrotherPrinterSensor(
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{coordinator.brother.host}/",
identifiers={(DOMAIN, coordinator.brother.serial)},
connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
serial_number=coordinator.brother.serial,
manufacturer="Brother",
model=coordinator.brother.model,
name=coordinator.brother.model,
sw_version=coordinator.brother.firmware,
)
self._attr_native_value = description.value(coordinator.data)
self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
self.entity_description = description
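
With the shared DeviceInfo moved into BrotherPrinterEntity (entity.py above), a platform class only wires up its description, unique ID and value. As a hedged illustration, a hypothetical extra platform, not part of this changeset and assuming the entity module shown above, would reduce to roughly:

from homeassistant.components.binary_sensor import BinarySensorEntity

class BrotherPrinterBinarySensor(BrotherPrinterEntity, BinarySensorEntity):
    """Hypothetical Brother binary sensor reusing the shared base entity."""

    def __init__(self, coordinator: BrotherDataUpdateCoordinator, description) -> None:
        super().__init__(coordinator)  # device info comes from BrotherPrinterEntity
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"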

View File

@@ -38,11 +38,11 @@
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"type": "Type of the printer"
"type": "Printer type"
},
"data_description": {
"host": "The hostname or IP address of the Brother printer to control.",
"type": "Brother printer type: ink or laser."
"type": "The type of the Brother printer."
},
"sections": {
"advanced_settings": {
@@ -207,8 +207,19 @@
"cannot_connect": {
"message": "An error occurred while connecting to the {device} printer: {error}"
},
"serial_mismatch": {
"message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
},
"update_error": {
"message": "An error occurred while retrieving data from the {device} printer: {error}"
}
},
"selector": {
"printer_type": {
"options": {
"ink": "ink",
"laser": "laser"
}
}
}
}

View File

@@ -189,7 +189,7 @@ class BryantEvolutionClimate(ClimateEntity):
return HVACAction.HEATING
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_parse_hvac_mode",
translation_key="failed_to_parse_hvac_action",
translation_placeholders={
"mode_and_active": mode_and_active,
"current_temperature": str(self.current_temperature),

View File

@@ -24,7 +24,7 @@
},
"exceptions": {
"failed_to_parse_hvac_action": {
"message": "Could not determine HVAC action: {mode_and_active}, {self.current_temperature}, {self.target_temperature_low}"
"message": "Could not determine HVAC action: {mode_and_active}, {current_temperature}, {target_temperature_low}"
},
"failed_to_parse_hvac_mode": {
"message": "Cannot parse response to HVACMode: {mode}"

View File

@@ -71,8 +71,11 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
services = await account_link.async_fetch_available_services(
hass.data[DATA_CLOUD]
)
except (aiohttp.ClientError, TimeoutError):
return []
except (aiohttp.ClientError, TimeoutError) as err:
raise config_entry_oauth2_flow.ImplementationUnavailableError(
"Cannot provide OAuth2 implementation for cloud services. "
"Failed to fetch from account link server."
) from err
hass.data[DATA_SERVICES] = services

View File

@@ -6,3 +6,5 @@ DEFAULT_PORT = 10102
CONF_SUPPORTED_MODES = "supported_modes"
CONF_SWING_SUPPORT = "swing_support"
MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
import logging
from pycoolmasternet_async import CoolMasterNet
@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
_LOGGER = logging.getLogger(__name__)
@@ -46,7 +47,34 @@ class CoolmasterDataUpdateCoordinator(
async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
"""Fetch data from Coolmaster."""
try:
return await self._coolmaster.status()
except OSError as error:
raise UpdateFailed from error
retries_left = MAX_RETRIES
status: dict[str, CoolMasterNetUnit] = {}
while retries_left > 0 and not status:
retries_left -= 1
try:
status = await self._coolmaster.status()
except OSError as error:
if retries_left == 0:
raise UpdateFailed(
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
) from error
_LOGGER.debug(
"Error communicating with coolmaster (%d retries left): %s",
retries_left,
str(error),
)
else:
if status:
return status
_LOGGER.debug(
"Error communicating with coolmaster: empty status received (%d retries left)",
retries_left,
)
backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
await asyncio.sleep(backoff)
raise UpdateFailed(
f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
)
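
With the constants added to const.py above (MAX_RETRIES = 3, BACKOFF_BASE_DELAY = 2), the loop makes up to three attempts and sleeps BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left) between them. A quick check of the resulting schedule on the communication-error path:

MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2

# retries_left is 2 after the first failed attempt and 1 after the second;
# a third failure raises UpdateFailed before any further sleep.
delays = [
    BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
    for retries_left in (2, 1)
]
print(delays)  # [2, 4] -> wait 2 s, retry, wait 4 s, retry, then give up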

View File

@@ -81,6 +81,9 @@
"active_map": {
"default": "mdi:floor-plan"
},
"auto_empty": {
"default": "mdi:delete-empty"
},
"water_amount": {
"default": "mdi:water"
},
@@ -160,6 +163,9 @@
"advanced_mode": {
"default": "mdi:tune"
},
"border_spin": {
"default": "mdi:rotate-right"
},
"border_switch": {
"default": "mdi:land-fields"
},

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==16.1.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
}

View File

@@ -5,8 +5,9 @@ from dataclasses import dataclass
from typing import TYPE_CHECKING, Any
from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
from deebot_client.command import CommandWithMessageHandling
from deebot_client.device import Device
from deebot_client.events import WorkModeEvent
from deebot_client.events import WorkModeEvent, auto_empty
from deebot_client.events.base import Event
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
from deebot_client.events.water_info import WaterAmountEvent
@@ -34,6 +35,9 @@ class EcovacsSelectEntityDescription[EventT: Event](
current_option_fn: Callable[[EventT], str | None]
options_fn: Callable[[CapabilitySetTypes], list[str]]
set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
lambda cap, option: cap.set(option)
)
ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
@@ -58,6 +62,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
set_option_fn=lambda cap, option: cap.set(None, option),
key="auto_empty",
translation_key="auto_empty",
),
)
@@ -106,14 +118,17 @@ class EcovacsSelectEntity[EventT: Event](
await super().async_added_to_hass()
async def on_event(event: EventT) -> None:
self._attr_current_option = self.entity_description.current_option_fn(event)
self.async_write_ha_state()
if (option := self.entity_description.current_option_fn(event)) is not None:
self._attr_current_option = option
self.async_write_ha_state()
self._subscribe(self._capability.event, on_event)
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
await self._device.execute_command(self._capability.set(option))
await self._device.execute_command(
self.entity_description.set_option_fn(self._capability, option)
)
class EcovacsActiveMapSelectEntity(

View File

@@ -129,6 +129,16 @@
"active_map": {
"name": "Active map"
},
"auto_empty": {
"name": "Auto-empty frequency",
"state": {
"auto": "Auto",
"min_10": "10 minutes",
"min_15": "15 minutes",
"min_25": "25 minutes",
"smart": "Smart"
}
},
"water_amount": {
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
"state": {
@@ -231,6 +241,9 @@
"advanced_mode": {
"name": "Advanced mode"
},
"border_spin": {
"name": "Border spin"
},
"border_switch": {
"name": "Border switch"
},

View File

@@ -99,6 +99,13 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSwitchEntityDescription, ...] = (
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
EcovacsSwitchEntityDescription(
capability_fn=lambda c: c.settings.border_spin,
key="border_spin",
translation_key="border_spin",
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
)

View File

@@ -151,14 +151,12 @@ ECOWITT_SENSORS_MAPPING: Final = {
key="RAIN_COUNT_MM",
native_unit_of_measurement=UnitOfPrecipitationDepth.MILLIMETERS,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=1,
),
EcoWittSensorTypes.RAIN_COUNT_INCHES: SensorEntityDescription(
key="RAIN_COUNT_INCHES",
native_unit_of_measurement=UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
),
EcoWittSensorTypes.RAIN_RATE_MM: SensorEntityDescription(

View File

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["eheimdigital"],
"quality_scale": "platinum",
"requirements": ["eheimdigital==1.3.0"],
"requirements": ["eheimdigital==1.4.0"],
"zeroconf": [
{ "name": "eheimdigital._http._tcp.local.", "type": "_http._tcp.local." }
]

View File

@@ -296,7 +296,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN):
return await self.async_step_discovered_connection()
return await self.async_step_manual_connection()
current_unique_ids = self._async_current_ids()
current_unique_ids = self._async_current_ids(include_ignore=False)
current_hosts = {
hostname_from_url(entry.data[CONF_HOST])
for entry in self._async_current_entries(include_ignore=False)

View File

@@ -1 +0,0 @@
"""Virtual integration: Enmax Energy."""

View File

@@ -1,6 +0,0 @@
{
"domain": "enmax",
"name": "Enmax Energy",
"integration_type": "virtual",
"supported_by": "opower"
}

View File

@@ -2,7 +2,9 @@
from __future__ import annotations
from aioesphomeapi import APIClient
import logging
from aioesphomeapi import APIClient, APIConnectionError
from homeassistant.components import zeroconf
from homeassistant.components.bluetooth import async_remove_scanner
@@ -20,9 +22,12 @@ from homeassistant.helpers.typing import ConfigType
from . import assist_satellite, dashboard, ffmpeg_proxy
from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DOMAIN
from .domain_data import DomainData
from .encryption_key_storage import async_get_encryption_key_storage
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
CLIENT_INFO = f"Home Assistant {ha_version}"
@@ -75,10 +80,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> bool:
"""Unload an esphome config entry."""
entry_data = await cleanup_instance(entry)
return await hass.config_entries.async_unload_platforms(
entry, entry_data.loaded_platforms
unload_ok = await hass.config_entries.async_unload_platforms(
entry, entry.runtime_data.loaded_platforms
)
if unload_ok:
await cleanup_instance(entry)
return unload_ok
async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
@@ -89,3 +96,57 @@ async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) ->
hass, DOMAIN, DEVICE_CONFLICT_ISSUE_FORMAT.format(entry.entry_id)
)
await DomainData.get(hass).get_or_create_store(hass, entry).async_remove()
await _async_clear_dynamic_encryption_key(hass, entry)
async def _async_clear_dynamic_encryption_key(
hass: HomeAssistant, entry: ESPHomeConfigEntry
) -> None:
"""Clear the dynamic encryption key on the device and from storage."""
if entry.unique_id is None or entry.data.get(CONF_NOISE_PSK) is None:
return
# Only clear the key if it's stored in our storage, meaning it was
# dynamically generated by us and not user-provided
storage = await async_get_encryption_key_storage(hass)
if await storage.async_get_key(entry.unique_id) is None:
return
host: str = entry.data[CONF_HOST]
port: int = entry.data[CONF_PORT]
password: str | None = entry.data[CONF_PASSWORD]
noise_psk: str | None = entry.data.get(CONF_NOISE_PSK)
zeroconf_instance = await zeroconf.async_get_instance(hass)
cli = APIClient(
host,
port,
password,
client_info=CLIENT_INFO,
zeroconf_instance=zeroconf_instance,
noise_psk=noise_psk,
timezone=hass.config.time_zone,
)
try:
await cli.connect()
# Clear the encryption key on the device by passing an empty key
if not await cli.noise_encryption_set_key(b""):
_LOGGER.debug(
"Could not clear dynamic encryption key for ESPHome device %s: Device rejected key removal",
entry.unique_id,
)
return
except APIConnectionError as exc:
_LOGGER.debug(
"Could not connect to ESPHome device %s to clear dynamic encryption key: %s",
entry.unique_id,
exc,
)
return
finally:
await cli.disconnect()
await storage.async_remove_key(entry.unique_id)

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==42.4.0",
"aioesphomeapi==42.6.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

View File

@@ -77,7 +77,7 @@ class EufyLifeConfigFlow(ConfigFlow, domain=DOMAIN):
data={CONF_MODEL: model},
)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if (

View File

@@ -40,7 +40,9 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
client = Firefly(
api_url=data[CONF_URL],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
session=async_get_clientsession(
hass=hass, verify_ssl=data[CONF_VERIFY_SSL]
),
)
await client.get_about()
except FireflyAuthenticationError:
@@ -127,6 +129,51 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
reconf_entry = self._get_reconfigure_entry()
if user_input:
try:
await _validate_input(
self.hass,
data={
**reconf_entry.data,
**user_input,
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except FireflyClientTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
return self.async_update_reload_and_abort(
reconf_entry,
data_updates={
CONF_URL: user_input[CONF_URL],
CONF_API_KEY: user_input[CONF_API_KEY],
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
data_schema=STEP_USER_DATA_SCHEMA,
suggested_values=user_input or reconf_entry.data.copy(),
),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""

View File

@@ -2,7 +2,8 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -20,6 +21,20 @@
},
"description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
},
"reconfigure": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"url": "[%key:common::config_flow::data::url%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"api_key": "[%key:component::firefly_iii::config::step::user::data_description::api_key%]",
"url": "[%key:common::config_flow::data::url%]",
"verify_ssl": "[%key:component::firefly_iii::config::step::user::data_description::verify_ssl%]"
},
"description": "Use the following form to reconfigure your Firefly III instance.",
"title": "Reconfigure Firefly III Integration"
},
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/foscam",
"iot_class": "local_polling",
"loggers": ["libpyfoscamcgi"],
"requirements": ["libpyfoscamcgi==0.0.8"]
"requirements": ["libpyfoscamcgi==0.0.9"]
}

View File

@@ -453,7 +453,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.http.app.router.register_resource(IndexView(repo_path, hass))
async_register_built_in_panel(hass, "light")
async_register_built_in_panel(hass, "safety")
async_register_built_in_panel(hass, "security")
async_register_built_in_panel(hass, "climate")
async_register_built_in_panel(hass, "profile")

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251029.0"]
"requirements": ["home-assistant-frontend==20251105.0"]
}

View File

@@ -43,6 +43,9 @@ from .coordinator import GiosConfigEntry, GiosDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class GiosSensorEntityDescription(SensorEntityDescription):

View File

@@ -14,6 +14,10 @@
"name": "[%key:common::config_flow::data::name%]",
"station_id": "Measuring station"
},
"data_description": {
"name": "Config entry name, by default, this is the name of your Home Assistant instance.",
"station_id": "The name of the measuring station where the environmental data is collected."
},
"title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)"
}
}

View File

@@ -620,7 +620,11 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
# Pop add-on data
# Unload coordinator
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
coordinator.unload()
# Pop coordinator
hass.data.pop(ADDONS_COORDINATOR, None)
return unload_ok

View File

@@ -3,6 +3,9 @@
from __future__ import annotations
from dataclasses import dataclass
import itertools
from aiohasupervisor.models.mounts import MountState
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -13,8 +16,14 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
from .entity import HassioAddonEntity
from .const import (
ADDONS_COORDINATOR,
ATTR_STARTED,
ATTR_STATE,
DATA_KEY_ADDONS,
DATA_KEY_MOUNTS,
)
from .entity import HassioAddonEntity, HassioMountEntity
@dataclass(frozen=True)
@@ -34,6 +43,16 @@ ADDON_ENTITY_DESCRIPTIONS = (
),
)
MOUNT_ENTITY_DESCRIPTIONS = (
HassioBinarySensorEntityDescription(
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_registry_enabled_default=False,
key=ATTR_STATE,
translation_key="mount",
target=MountState.ACTIVE.value,
),
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -44,13 +63,26 @@ async def async_setup_entry(
coordinator = hass.data[ADDONS_COORDINATOR]
async_add_entities(
HassioAddonBinarySensor(
addon=addon,
coordinator=coordinator,
entity_description=entity_description,
itertools.chain(
[
HassioAddonBinarySensor(
addon=addon,
coordinator=coordinator,
entity_description=entity_description,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
],
[
HassioMountBinarySensor(
mount=mount,
coordinator=coordinator,
entity_description=entity_description,
)
for mount in coordinator.data[DATA_KEY_MOUNTS].values()
for entity_description in MOUNT_ENTITY_DESCRIPTIONS
],
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
)
@@ -68,3 +100,20 @@ class HassioAddonBinarySensor(HassioAddonEntity, BinarySensorEntity):
if self.entity_description.target is None:
return value
return value == self.entity_description.target
class HassioMountBinarySensor(HassioMountEntity, BinarySensorEntity):
"""Binary sensor for Hass.io mount."""
entity_description: HassioBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
value = getattr(
self.coordinator.data[DATA_KEY_MOUNTS][self._mount.name],
self.entity_description.key,
)
if self.entity_description.target is None:
return value
return value == self.entity_description.target

View File

@@ -90,6 +90,7 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_MOUNTS_INFO = "hassio_mounts_info"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
ATTR_AUTO_UPDATE = "auto_update"
@@ -110,6 +111,7 @@ DATA_KEY_SUPERVISOR = "supervisor"
DATA_KEY_CORE = "core"
DATA_KEY_HOST = "host"
DATA_KEY_SUPERVISOR_ISSUES = "supervisor_issues"
DATA_KEY_MOUNTS = "mounts"
PLACEHOLDER_KEY_ADDON = "addon"
PLACEHOLDER_KEY_ADDON_INFO = "addon_info"
@@ -174,3 +176,4 @@ class SupervisorEntityModel(StrEnum):
CORE = "Home Assistant Core"
SUPERVISOR = "Home Assistant Supervisor"
HOST = "Home Assistant Host"
MOUNT = "Home Assistant Mount"

View File

@@ -10,6 +10,11 @@ from typing import TYPE_CHECKING, Any
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import StoreInfo
from aiohasupervisor.models.mounts import (
CIFSMountResponse,
MountsInfo,
NFSMountResponse,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
@@ -41,9 +46,11 @@ from .const import (
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_MOUNTS,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DATA_KEY_SUPERVISOR_ISSUES,
DATA_MOUNTS_INFO,
DATA_NETWORK_INFO,
DATA_OS_INFO,
DATA_STORE,
@@ -174,6 +181,16 @@ def get_core_info(hass: HomeAssistant) -> dict[str, Any] | None:
return hass.data.get(DATA_CORE_INFO)
@callback
@bind_hass
def get_mounts_info(hass: HomeAssistant) -> MountsInfo | None:
"""Return Home Assistant mounts information from Supervisor.
Async friendly.
"""
return hass.data.get(DATA_MOUNTS_INFO)
@callback
@bind_hass
def get_issues_info(hass: HomeAssistant) -> SupervisorIssues | None:
@@ -203,6 +220,25 @@ def async_register_addons_in_dev_reg(
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
@callback
def async_register_mounts_in_dev_reg(
entry_id: str,
dev_reg: dr.DeviceRegistry,
mounts: list[CIFSMountResponse | NFSMountResponse],
) -> None:
"""Register mounts in the device registry."""
for mount in mounts:
params = DeviceInfo(
identifiers={(DOMAIN, f"mount_{mount.name}")},
manufacturer="Home Assistant",
model=SupervisorEntityModel.MOUNT,
model_id=f"{mount.usage}/{mount.type}",
name=mount.name,
entry_type=dr.DeviceEntryType.SERVICE,
)
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
@callback
def async_register_os_in_dev_reg(
entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
@@ -272,12 +308,12 @@ def async_register_supervisor_in_dev_reg(
@callback
def async_remove_addons_from_dev_reg(
dev_reg: dr.DeviceRegistry, addons: set[str]
def async_remove_devices_from_dev_reg(
dev_reg: dr.DeviceRegistry, devices: set[str]
) -> None:
"""Remove addons from the device registry."""
for addon_slug in addons:
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, addon_slug)}):
"""Remove devices from the device registry."""
for device in devices:
if dev := dev_reg.async_get_device(identifiers={(DOMAIN, device)}):
dev_reg.async_remove_device(dev.id)
@@ -362,12 +398,19 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
**get_supervisor_stats(self.hass),
}
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
new_data[DATA_KEY_MOUNTS] = {
mount.name: mount
for mount in getattr(get_mounts_info(self.hass), "mounts", [])
}
# If this is the initial refresh, register all addons and return the dict
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
async_register_mounts_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
)
async_register_core_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
)
@@ -389,7 +432,20 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# Remove mounts that no longer exist from the device registry
supervisor_mount_devices = {
device.name
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
self.entry_id
)
if device.model == SupervisorEntityModel.MOUNT
}
if stale_mounts := supervisor_mount_devices - set(new_data[DATA_KEY_MOUNTS]):
async_remove_devices_from_dev_reg(
self.dev_reg, {f"mount_{stale_mount}" for stale_mount in stale_mounts}
)
if not self.is_hass_os and (
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
@@ -397,11 +453,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
# If there are new add-ons, we should reload the config entry so we can
# If there are new add-ons or mounts, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
if self.data and set(new_data[DATA_KEY_ADDONS]) - set(
self.data[DATA_KEY_ADDONS]
if self.data and (
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
@@ -428,6 +485,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
DATA_CORE_INFO: hassio.get_core_info(),
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
DATA_OS_INFO: hassio.get_os_info(),
DATA_MOUNTS_INFO: self.supervisor_client.mounts.info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = hassio.get_core_stats()
@@ -563,3 +621,8 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
@callback
def unload(self) -> None:
"""Clean up when config entry unloaded."""
self.jobs.unload()
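Worth noting: mount devices are registered with a prefixed identifier (mount_<name>) while the registry device name stays the bare mount name, which is why the stale-mount cleanup above re-prefixes the names before removal. A small standalone sketch of that bookkeeping, with made-up mount names:
DOMAIN = "hassio"

registered = {"backup_share", "media_nas"}  # device.name values already in the registry
current = {"media_nas"}                     # mounts still reported by Supervisor

stale = registered - current
identifiers_to_remove = {(DOMAIN, f"mount_{name}") for name in stale}
print(identifiers_to_remove)  # {('hassio', 'mount_backup_share')}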

View File

@@ -4,6 +4,8 @@ from __future__ import annotations
from typing import Any
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -15,6 +17,7 @@ from .const import (
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_MOUNTS,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DOMAIN,
@@ -192,3 +195,34 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Mount."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
mount: CIFSMountResponse | NFSMountResponse,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = (
f"home_assistant_mount_{mount.name}_{entity_description.key}"
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"mount_{mount.name}")}
)
self._mount = mount
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and self._mount.name in self.coordinator.data[DATA_KEY_MOUNTS]
)

View File

@@ -44,7 +44,6 @@ from .const import (
EVENT_SUPPORTED_CHANGED,
EXTRA_PLACEHOLDERS,
ISSUE_KEY_ADDON_BOOT_FAIL,
ISSUE_KEY_ADDON_DEPRECATED,
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
ISSUE_KEY_ADDON_PWNED,
@@ -87,7 +86,6 @@ ISSUE_KEYS_FOR_REPAIRS = {
"issue_system_disk_lifetime",
ISSUE_KEY_SYSTEM_FREE_SPACE,
ISSUE_KEY_ADDON_PWNED,
ISSUE_KEY_ADDON_DEPRECATED,
}
_LOGGER = logging.getLogger(__name__)

View File

@@ -3,6 +3,7 @@
from collections.abc import Callable
from dataclasses import dataclass, replace
from functools import partial
import logging
from typing import Any
from uuid import UUID
@@ -29,6 +30,8 @@ from .const import (
)
from .handler import get_supervisor_client
_LOGGER = logging.getLogger(__name__)
@dataclass(slots=True, frozen=True)
class JobSubscription:
@@ -45,7 +48,7 @@ class JobSubscription:
event_callback: Callable[[Job], Any]
uuid: str | None = None
name: str | None = None
reference: str | None | type[Any] = Any
reference: str | None = None
def __post_init__(self) -> None:
"""Validate at least one filter option is present."""
@@ -58,7 +61,7 @@ class JobSubscription:
"""Return true if job matches subscription filters."""
if self.uuid:
return job.uuid == self.uuid
return job.name == self.name and self.reference in (Any, job.reference)
return job.name == self.name and self.reference in (None, job.reference)
class SupervisorJobs:
@@ -70,6 +73,7 @@ class SupervisorJobs:
self._supervisor_client = get_supervisor_client(hass)
self._jobs: dict[UUID, Job] = {}
self._subscriptions: set[JobSubscription] = set()
self._dispatcher_disconnect: Callable[[], None] | None = None
@property
def current_jobs(self) -> list[Job]:
@@ -79,20 +83,24 @@ class SupervisorJobs:
def subscribe(self, subscription: JobSubscription) -> CALLBACK_TYPE:
"""Subscribe to updates for job. Return callback is used to unsubscribe.
If any jobs match the subscription at the time this is called, creates
tasks to run their callback on it.
If any jobs match the subscription at the time this is called, runs the
callback on them.
"""
self._subscriptions.add(subscription)
# As these are callbacks they are safe to run in the event loop
# We wrap these in an asyncio task so subscribing does not wait on the logic
if matches := [job for job in self._jobs.values() if subscription.matches(job)]:
async def event_callback_async(job: Job) -> Any:
return subscription.event_callback(job)
for match in matches:
self._hass.async_create_task(event_callback_async(match))
# Run the callback on each existing match
# We catch all errors to prevent an error in one from stopping the others
for match in [job for job in self._jobs.values() if subscription.matches(job)]:
try:
subscription.event_callback(match)
except Exception as err: # noqa: BLE001
_LOGGER.error(
"Error encountered processing Supervisor Job (%s %s %s) - %s",
match.name,
match.reference,
match.uuid,
err,
)
return partial(self._subscriptions.discard, subscription)
@@ -131,7 +139,7 @@ class SupervisorJobs:
# If this is the first update register to receive Supervisor events
if first_update:
async_dispatcher_connect(
self._dispatcher_disconnect = async_dispatcher_connect(
self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_jobs
)
@@ -158,3 +166,14 @@ class SupervisorJobs:
for sub in self._subscriptions:
if sub.matches(job):
sub.event_callback(job)
# If the job is done, pop it from our cache if present after processing is done
if job.done and job.uuid in self._jobs:
del self._jobs[job.uuid]
@callback
def unload(self) -> None:
"""Unregister with dispatcher on config entry unload."""
if self._dispatcher_disconnect:
self._dispatcher_disconnect()
self._dispatcher_disconnect = None
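With reference now defaulting to None, a subscription without a reference matches any job with that name, while a concrete reference must match exactly. A standalone sketch of the filter semantics; the dataclass below mirrors, but is not, the real JobSubscription, and the job names are made up:
from dataclasses import dataclass

@dataclass(frozen=True)
class Sub:
    name: str
    reference: str | None = None

    def matches(self, job_name: str, job_reference: str | None) -> bool:
        return job_name == self.name and self.reference in (None, job_reference)

any_backup = Sub(name="backup_manager_partial_backup")
one_addon = Sub(name="backup_manager_partial_backup", reference="core_ssh")
print(any_backup.matches("backup_manager_partial_backup", "core_mariadb"))  # True
print(one_addon.matches("backup_manager_partial_backup", "core_mariadb"))   # False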

View File

@@ -1,6 +1,9 @@
{
"entity": {
"binary_sensor": {
"mount": {
"name": "Connected"
},
"state": {
"name": "Running"
}

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"requirements": ["holidays==0.83", "babel==2.15.0"]
"requirements": ["holidays==0.84", "babel==2.15.0"]
}

View File

@@ -39,6 +39,8 @@ from .const import (
NABU_CASA_FIRMWARE_RELEASES_URL,
PID,
PRODUCT,
RADIO_TX_POWER_DBM_BY_COUNTRY,
RADIO_TX_POWER_DBM_DEFAULT,
SERIAL_NUMBER,
VID,
)
@@ -75,6 +77,7 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
context: ConfigFlowContext
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
ZIGBEE_BAUDRATE = 460800
async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None
@@ -102,6 +105,21 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
next_step_id="finish_thread_installation",
)
def _extra_zha_hardware_options(self) -> dict[str, Any]:
"""Return extra ZHA hardware options."""
country = self.hass.config.country
if country is None:
tx_power = RADIO_TX_POWER_DBM_DEFAULT
else:
tx_power = RADIO_TX_POWER_DBM_BY_COUNTRY.get(
country, RADIO_TX_POWER_DBM_DEFAULT
)
return {
"tx_power": tx_power,
}
class HomeAssistantConnectZBT2ConfigFlow(
ZBT2FirmwareMixin,
@@ -112,7 +130,6 @@ class HomeAssistantConnectZBT2ConfigFlow(
VERSION = 1
MINOR_VERSION = 1
ZIGBEE_BAUDRATE = 460800
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Initialize the config flow."""

View File

@@ -1,5 +1,7 @@
"""Constants for the Home Assistant Connect ZBT-2 integration."""
from homeassistant.generated.countries import COUNTRIES
DOMAIN = "homeassistant_connect_zbt2"
NABU_CASA_FIRMWARE_RELEASES_URL = (
@@ -17,3 +19,59 @@ VID = "vid"
DEVICE = "device"
HARDWARE_NAME = "Home Assistant Connect ZBT-2"
RADIO_TX_POWER_DBM_DEFAULT = 8
RADIO_TX_POWER_DBM_BY_COUNTRY = {
# EU Member States
"AT": 10,
"BE": 10,
"BG": 10,
"HR": 10,
"CY": 10,
"CZ": 10,
"DK": 10,
"EE": 10,
"FI": 10,
"FR": 10,
"DE": 10,
"GR": 10,
"HU": 10,
"IE": 10,
"IT": 10,
"LV": 10,
"LT": 10,
"LU": 10,
"MT": 10,
"NL": 10,
"PL": 10,
"PT": 10,
"RO": 10,
"SK": 10,
"SI": 10,
"ES": 10,
"SE": 10,
# EEA Members
"IS": 10,
"LI": 10,
"NO": 10,
# Standards harmonized with RED or ETSI
"CH": 10,
"GB": 10,
"TR": 10,
"AL": 10,
"BA": 10,
"GE": 10,
"MD": 10,
"ME": 10,
"MK": 10,
"RS": 10,
"UA": 10,
# Other CEPT nations
"AD": 10,
"AZ": 10,
"MC": 10,
"SM": 10,
"VA": 10,
}
assert set(RADIO_TX_POWER_DBM_BY_COUNTRY) <= COUNTRIES

View File

@@ -456,6 +456,10 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
# This step is necessary to prevent `user_input` from being passed through
return await self.async_step_continue_zigbee()
def _extra_zha_hardware_options(self) -> dict[str, Any]:
"""Return extra ZHA hardware options."""
return {}
async def async_step_continue_zigbee(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -478,6 +482,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
},
"radio_type": "ezsp",
"flow_strategy": self._zigbee_flow_strategy,
**self._extra_zha_hardware_options(),
},
)
return self._continue_zha_flow(result)
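The base flow exposes _extra_zha_hardware_options as a small hook that firmware flows can override to inject extra options into the ZHA hardware flow; the ZBT-2 mixin above uses it to pass a per-country tx_power. A stripped-down standalone sketch of the hook pattern (class names are illustrative only, not the real flow classes):
class BaseFlow:
    def _extra_zha_hardware_options(self) -> dict:
        # Subclasses override this to add extra ZHA options
        return {}

    def zha_flow_data(self) -> dict:
        return {"radio_type": "ezsp", **self._extra_zha_hardware_options()}

class Zbt2Flow(BaseFlow):
    def _extra_zha_hardware_options(self) -> dict:
        return {"tx_power": 10}

print(Zbt2Flow().zha_flow_data())  # {'radio_type': 'ezsp', 'tx_power': 10}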

View File

@@ -38,6 +38,7 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir, storage
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.http import (
KEY_ALLOW_CONFIGURED_CORS,
KEY_AUTHENTICATED, # noqa: F401
@@ -109,7 +110,7 @@ HTTP_SCHEMA: Final = vol.All(
cv.deprecated(CONF_BASE_URL),
vol.Schema(
{
vol.Optional(CONF_SERVER_HOST, default=_DEFAULT_BIND): vol.All(
vol.Optional(CONF_SERVER_HOST): vol.All(
cv.ensure_list, vol.Length(min=1), [cv.string]
),
vol.Optional(CONF_SERVER_PORT, default=SERVER_PORT): cv.port,
@@ -207,7 +208,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
if conf is None:
conf = cast(ConfData, HTTP_SCHEMA({}))
server_host = conf[CONF_SERVER_HOST]
if CONF_SERVER_HOST in conf and is_hassio(hass):
ir.async_create_issue(
hass,
DOMAIN,
"server_host_may_break_hassio",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="server_host_may_break_hassio",
)
server_host = conf.get(CONF_SERVER_HOST, _DEFAULT_BIND)
server_port = conf[CONF_SERVER_PORT]
ssl_certificate = conf.get(CONF_SSL_CERTIFICATE)
ssl_peer_certificate = conf.get(CONF_SSL_PEER_CERTIFICATE)

View File

@@ -1,5 +1,9 @@
{
"issues": {
"server_host_may_break_hassio": {
"description": "The `server_host` configuration option in the HTTP integration is prone to break the communication between Home Assistant Core and Supervisor, and will be removed in a future release.\n\nIf you are using this option to bind Home Assistant to specific network interfaces, please remove it from your configuration. Home Assistant will automatically bind to all available interfaces by default.\n\nIf you have specific networking requirements, consider using firewall rules or other network configuration to control access to Home Assistant.",
"title": "The `server_host` HTTP configuration may break Home Assistant Core - Supervisor communication"
},
"ssl_configured_without_configured_urls": {
"description": "Home Assistant detected that SSL has been set up on your instance, however, no custom external internet URL has been set.\n\nThis may result in unexpected behavior. Text-to-speech may fail, and integrations may not be able to connect back to your instance correctly.\n\nTo address this issue, go to Settings > System > Network; under the \"Home Assistant URL\" section, configure your new \"Internet\" and \"Local network\" addresses that match your new SSL configuration.",
"title": "SSL is configured without an external URL or internal URL"

View File

@@ -20,6 +20,11 @@
}
}
},
"sensor": {
"rf_message_rssi": {
"default": "mdi:signal"
}
},
"water_heater": {
"boiler": {
"state": {

View File

@@ -61,6 +61,16 @@ SENSOR_TYPES: tuple[IncomfortSensorEntityDescription, ...] = (
value_key="tap_temp",
entity_registry_enabled_default=False,
),
# A lower RSSI value is better
# A typical RSSI value is 28 for a connection that is just in range
IncomfortSensorEntityDescription(
key="rf_message_rssi",
translation_key="rf_message_rssi",
state_class=SensorStateClass.MEASUREMENT,
value_key="rf_message_rssi",
extra_key="rfstatus_cntr",
entity_registry_enabled_default=False,
),
)

View File

@@ -76,6 +76,9 @@
}
},
"sensor": {
"rf_message_rssi": {
"name": "RSSI"
},
"tap_temperature": {
"name": "Tap temperature"
}

View File

@@ -72,7 +72,7 @@ class KegtronConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

View File

@@ -85,7 +85,7 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovered_adv:
self._discovered_advs[discovery.address] = discovery
else:
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass):
self._ble_device = discovery_info.device
address = discovery_info.address

View File

@@ -299,8 +299,8 @@ def _create_climate_ui(xknx: XKNX, conf: ConfigExtractor, name: str) -> XknxClim
group_address_active_state=conf.get_state_and_passive(CONF_GA_ACTIVE),
group_address_command_value_state=conf.get_state_and_passive(CONF_GA_VALVE),
sync_state=sync_state,
min_temp=conf.get(ClimateConf.MIN_TEMP),
max_temp=conf.get(ClimateConf.MAX_TEMP),
min_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MIN_TEMP),
max_temp=conf.get(CONF_TARGET_TEMPERATURE, ClimateConf.MAX_TEMP),
mode=climate_mode,
group_address_fan_speed=conf.get_write(CONF_GA_FAN_SPEED),
group_address_fan_speed_state=conf.get_state_and_passive(CONF_GA_FAN_SPEED),
@@ -486,7 +486,7 @@ class _KnxClimate(ClimateEntity, _KnxEntityBase):
ha_controller_modes.append(self._last_hvac_mode)
ha_controller_modes.append(HVACMode.OFF)
hvac_modes = list(set(filter(None, ha_controller_modes)))
hvac_modes = sorted(set(filter(None, ha_controller_modes)))
return (
hvac_modes
if hvac_modes

View File

@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.10.0",
"xknxproject==3.8.2",
"knx-frontend==2025.10.26.81530"
"knx-frontend==2025.10.31.195356"
],
"single_config_entry": true
}

View File

@@ -12,7 +12,7 @@ from xknx.telegram import Telegram
from xknx.telegram.address import parse_device_group_address
from xknx.telegram.apci import GroupValueRead, GroupValueResponse, GroupValueWrite
from homeassistant.const import CONF_TYPE, CONF_VALUE_TEMPLATE, SERVICE_RELOAD
from homeassistant.const import CONF_TYPE, SERVICE_RELOAD
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@@ -144,7 +144,6 @@ SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA = vol.Any(
ExposeSchema.EXPOSE_SENSOR_SCHEMA.extend(
{
vol.Optional(SERVICE_KNX_ATTR_REMOVE, default=False): cv.boolean,
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
}
),
vol.Schema(

View File

@@ -106,7 +106,7 @@ class KulerskyConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovery_info:
self._discovered_devices[discovery.address] = discovery
else:
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery in async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses

View File

@@ -79,7 +79,7 @@ class Ld2410BleConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovery_info:
self._discovered_devices[discovery.address] = discovery
else:
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery in async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses

View File

@@ -35,7 +35,7 @@ class LeaoneConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

View File

@@ -85,7 +85,7 @@ class LedBleConfigFlow(ConfigFlow, domain=DOMAIN):
if discovery := self._discovery_info:
self._discovered_devices[discovery.address] = discovery
else:
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery in async_discovered_service_info(self.hass):
if (
discovery.address in current_addresses

View File

@@ -622,6 +622,7 @@ ENERGY_USAGE_SENSORS: tuple[ThinQEnergySensorEntityDescription, ...] = (
usage_period=USAGE_MONTHLY,
start_date_fn=lambda today: today,
end_date_fn=lambda today: today,
state_class=SensorStateClass.TOTAL_INCREASING,
),
ThinQEnergySensorEntityDescription(
key="last_month",

View File

@@ -13,5 +13,5 @@
"iot_class": "cloud_push",
"loggers": ["pylitterbot"],
"quality_scale": "bronze",
"requirements": ["pylitterbot==2024.2.7"]
"requirements": ["pylitterbot==2025.0.0"]
}

View File

@@ -408,6 +408,20 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity):
if not alarm_code or code == alarm_code:
return
current_context = (
self._context if hasattr(self, "_context") and self._context else None
)
user_id_from_context = current_context.user_id if current_context else None
self.hass.bus.async_fire(
"manual_alarm_bad_code_attempt",
{
"entity_id": self.entity_id,
"user_id": user_id_from_context,
"target_state": state,
},
)
raise ServiceValidationError(
"Invalid alarm code provided",
translation_domain=DOMAIN,
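Anything listening on the event bus can react to the new manual_alarm_bad_code_attempt event, for example to log or notify on failed attempts. A hedged sketch of such a listener; the surrounding setup (e.g. a custom component's async_setup) is assumed and not shown:
from homeassistant.core import Event, HomeAssistant, callback

@callback
def _log_bad_code(event: Event) -> None:
    # Payload fields fired above: entity_id, user_id, target_state
    data = event.data
    print(f"Bad alarm code for {data['entity_id']} (user {data['user_id']}), target state {data['target_state']}")

def register_listener(hass: HomeAssistant) -> None:
    hass.bus.async_listen("manual_alarm_bad_code_attempt", _log_bad_code)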

View File

@@ -527,4 +527,57 @@ DISCOVERY_SCHEMAS = [
vendor_id=(4447,),
product_id=(8194,),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
key="AqaraOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
entity_category=EntityCategory.CONFIG,
translation_key="sensitivity_level",
options=["low", "standard", "high"],
device_to_ha={
0: "low",
1: "standard",
2: "high",
}.get,
ha_to_device={
"low": 0,
"standard": 1,
"high": 2,
}.get,
),
entity_class=MatterAttributeSelectEntity,
required_attributes=(
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
),
vendor_id=(4447,),
product_id=(
8197,
8195,
),
),
MatterDiscoverySchema(
platform=Platform.SELECT,
entity_description=MatterSelectEntityDescription(
key="HeimanOccupancySensorBooleanStateConfigurationCurrentSensitivityLevel",
entity_category=EntityCategory.CONFIG,
translation_key="sensitivity_level",
options=["low", "standard", "high"],
device_to_ha={
0: "low",
1: "standard",
2: "high",
}.get,
ha_to_device={
"low": 0,
"standard": 1,
"high": 2,
}.get,
),
entity_class=MatterAttributeSelectEntity,
required_attributes=(
clusters.BooleanStateConfiguration.Attributes.CurrentSensitivityLevel,
),
vendor_id=(4619,),
product_id=(4097,),
),
]

View File

@@ -93,7 +93,7 @@ class InspectorBLEConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovery_info = self._discovered_devices[address]
return await self.async_step_check_connection()
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

View File

@@ -49,6 +49,15 @@ ATA_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
value_fn=lambda x: x.device.total_energy_consumed,
enabled=lambda x: x.device.has_energy_consumed_meter,
),
MelcloudSensorEntityDescription(
key="outside_temperature",
translation_key="outside_temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda x: x.device.outdoor_temperature,
enabled=lambda x: x.device.has_outdoor_temperature,
),
)
ATW_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
MelcloudSensorEntityDescription(

View File

@@ -75,7 +75,7 @@ class MelnorConfigFlow(ConfigFlow, domain=DOMAIN):
return self._create_entry(address)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(
self.hass, connectable=True
):

View File

@@ -9,7 +9,7 @@
"iot_class": "cloud_push",
"loggers": ["pymiele"],
"quality_scale": "platinum",
"requirements": ["pymiele==0.5.6"],
"requirements": ["pymiele==0.6.0"],
"single_config_entry": true,
"zeroconf": ["_mieleathome._tcp.local."]
}

View File

@@ -72,7 +72,7 @@ class MoatConfigFlow(ConfigFlow, domain=DOMAIN):
title=self._discovered_devices[address], data={}
)
current_addresses = self._async_current_ids()
current_addresses = self._async_current_ids(include_ignore=False)
for discovery_info in async_discovered_service_info(self.hass, False):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:

View File

@@ -66,6 +66,8 @@ from .const import (
CONF_BYTESIZE,
CONF_CLIMATES,
CONF_COLOR_TEMP_REGISTER,
CONF_CURRENT_TEMP_OFFSET,
CONF_CURRENT_TEMP_SCALE,
CONF_DATA_TYPE,
CONF_DEVICE_ADDRESS,
CONF_FAN_MODE_AUTO,
@@ -137,6 +139,8 @@ from .const import (
CONF_SWING_MODE_SWING_VERT,
CONF_SWING_MODE_VALUES,
CONF_TARGET_TEMP,
CONF_TARGET_TEMP_OFFSET,
CONF_TARGET_TEMP_SCALE,
CONF_TARGET_TEMP_WRITE_REGISTERS,
CONF_VERIFY,
CONF_VIRTUAL_COUNT,
@@ -159,8 +163,10 @@ from .modbus import DATA_MODBUS_HUBS, ModbusHub, async_modbus_setup
from .validators import (
duplicate_fan_mode_validator,
duplicate_swing_mode_validator,
ensure_and_check_conflicting_scales_and_offsets,
hvac_fixedsize_reglist_validator,
nan_validator,
not_zero_value,
register_int_list_validator,
struct_validator,
)
@@ -210,8 +216,10 @@ BASE_STRUCT_SCHEMA = BASE_COMPONENT_SCHEMA.extend(
]
),
vol.Optional(CONF_STRUCTURE): cv.string,
vol.Optional(CONF_SCALE, default=1): vol.Coerce(float),
vol.Optional(CONF_OFFSET, default=0): vol.Coerce(float),
vol.Optional(CONF_SCALE): vol.All(
vol.Coerce(float), lambda v: not_zero_value(v, "Scale cannot be zero.")
),
vol.Optional(CONF_OFFSET): vol.Coerce(float),
vol.Optional(CONF_PRECISION): cv.positive_int,
vol.Optional(
CONF_SWAP,
@@ -273,6 +281,18 @@ CLIMATE_SCHEMA = vol.All(
vol.Optional(CONF_TEMPERATURE_UNIT, default=DEFAULT_TEMP_UNIT): cv.string,
vol.Exclusive(CONF_HVAC_ONOFF_COIL, "hvac_onoff_type"): cv.positive_int,
vol.Exclusive(CONF_HVAC_ONOFF_REGISTER, "hvac_onoff_type"): cv.positive_int,
vol.Optional(CONF_CURRENT_TEMP_SCALE): vol.All(
vol.Coerce(float),
lambda v: not_zero_value(
v, "Current temperature scale cannot be zero."
),
),
vol.Optional(CONF_TARGET_TEMP_SCALE): vol.All(
vol.Coerce(float),
lambda v: not_zero_value(v, "Target temperature scale cannot be zero."),
),
vol.Optional(CONF_CURRENT_TEMP_OFFSET): vol.Coerce(float),
vol.Optional(CONF_TARGET_TEMP_OFFSET): vol.Coerce(float),
vol.Optional(
CONF_HVAC_ON_VALUE, default=DEFAULT_HVAC_ON_VALUE
): cv.positive_int,
@@ -385,6 +405,7 @@ CLIMATE_SCHEMA = vol.All(
),
},
),
ensure_and_check_conflicting_scales_and_offsets,
)
COVERS_SCHEMA = BASE_COMPONENT_SCHEMA.extend(

View File

@@ -50,6 +50,8 @@ from .const import (
CALL_TYPE_WRITE_REGISTER,
CALL_TYPE_WRITE_REGISTERS,
CONF_CLIMATES,
CONF_CURRENT_TEMP_OFFSET,
CONF_CURRENT_TEMP_SCALE,
CONF_FAN_MODE_AUTO,
CONF_FAN_MODE_DIFFUSE,
CONF_FAN_MODE_FOCUS,
@@ -97,8 +99,12 @@ from .const import (
CONF_SWING_MODE_SWING_VERT,
CONF_SWING_MODE_VALUES,
CONF_TARGET_TEMP,
CONF_TARGET_TEMP_OFFSET,
CONF_TARGET_TEMP_SCALE,
CONF_TARGET_TEMP_WRITE_REGISTERS,
CONF_WRITE_REGISTERS,
DEFAULT_OFFSET,
DEFAULT_SCALE,
DataType,
)
from .entity import ModbusStructEntity
@@ -166,6 +172,10 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
self._attr_min_temp = config[CONF_MIN_TEMP]
self._attr_max_temp = config[CONF_MAX_TEMP]
self._attr_target_temperature_step = config[CONF_STEP]
self._current_temp_scale = config[CONF_CURRENT_TEMP_SCALE]
self._current_temp_offset = config[CONF_CURRENT_TEMP_OFFSET]
self._target_temp_scale = config[CONF_TARGET_TEMP_SCALE]
self._target_temp_offset = config[CONF_TARGET_TEMP_OFFSET]
if CONF_HVAC_MODE_REGISTER in config:
mode_config = config[CONF_HVAC_MODE_REGISTER]
@@ -413,8 +423,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
target_temperature = (
float(kwargs[ATTR_TEMPERATURE]) - self._offset
) / self._scale
float(kwargs[ATTR_TEMPERATURE]) - self._target_temp_offset
) / self._target_temp_scale
if self._data_type in (
DataType.INT16,
DataType.INT32,
@@ -472,15 +482,25 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
self._target_temperature_register[
HVACMODE_TO_TARG_TEMP_REG_INDEX_ARRAY[self._attr_hvac_mode]
],
self._target_temp_scale,
self._target_temp_offset,
)
self._attr_current_temperature = await self._async_read_register(
self._input_type, self._address
self._input_type,
self._address,
self._current_temp_scale,
self._current_temp_offset,
)
# Read the HVAC mode register if defined
if self._hvac_mode_register is not None:
hvac_mode = await self._async_read_register(
CALL_TYPE_REGISTER_HOLDING, self._hvac_mode_register, raw=True
CALL_TYPE_REGISTER_HOLDING,
self._hvac_mode_register,
DEFAULT_SCALE,
DEFAULT_OFFSET,
raw=True,
)
# Translate the value received
@@ -499,7 +519,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
# Read the HVAC action register if defined
if self._hvac_action_register is not None:
hvac_action = await self._async_read_register(
self._hvac_action_type, self._hvac_action_register, raw=True
self._hvac_action_type,
self._hvac_action_register,
DEFAULT_SCALE,
DEFAULT_OFFSET,
raw=True,
)
# Translate the value received
@@ -517,6 +541,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
self._fan_mode_register
if isinstance(self._fan_mode_register, int)
else self._fan_mode_register[0],
DEFAULT_SCALE,
DEFAULT_OFFSET,
raw=True,
)
@@ -533,6 +559,8 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
self._swing_mode_register
if isinstance(self._swing_mode_register, int)
else self._swing_mode_register[0],
DEFAULT_SCALE,
DEFAULT_OFFSET,
raw=True,
)
@@ -551,7 +579,11 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
# in the mode register.
if self._hvac_onoff_register is not None:
onoff = await self._async_read_register(
CALL_TYPE_REGISTER_HOLDING, self._hvac_onoff_register, raw=True
CALL_TYPE_REGISTER_HOLDING,
self._hvac_onoff_register,
DEFAULT_SCALE,
DEFAULT_OFFSET,
raw=True,
)
if onoff == self._hvac_off_value:
self._attr_hvac_mode = HVACMode.OFF
@@ -562,7 +594,12 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
self._attr_hvac_mode = HVACMode.OFF
async def _async_read_register(
self, register_type: str, register: int, raw: bool | None = False
self,
register_type: str,
register: int,
scale: float,
offset: float,
raw: bool | None = False,
) -> float | None:
"""Read register using the Modbus hub slave."""
result = await self._hub.async_pb_call(
@@ -579,7 +616,7 @@ class ModbusThermostat(ModbusStructEntity, RestoreEntity, ClimateEntity):
return int(result.registers[0])
# The regular handling of the value
self._value = self.unpack_structure_result(result.registers)
self._value = self.unpack_structure_result(result.registers, scale, offset)
if not self._value:
self._attr_available = False
return None

View File

@@ -19,6 +19,8 @@ CONF_BYTESIZE = "bytesize"
CONF_CLIMATES = "climates"
CONF_BRIGHTNESS_REGISTER = "brightness_address"
CONF_COLOR_TEMP_REGISTER = "color_temp_address"
CONF_CURRENT_TEMP_OFFSET = "current_temp_offset"
CONF_CURRENT_TEMP_SCALE = "current_temp_scale"
CONF_DATA_TYPE = "data_type"
CONF_DEVICE_ADDRESS = "device_address"
CONF_FANS = "fans"
@@ -48,6 +50,8 @@ CONF_SWAP_BYTE = "byte"
CONF_SWAP_WORD = "word"
CONF_SWAP_WORD_BYTE = "word_byte"
CONF_TARGET_TEMP = "target_temp_register"
CONF_TARGET_TEMP_OFFSET = "target_temp_offset"
CONF_TARGET_TEMP_SCALE = "target_temp_scale"
CONF_TARGET_TEMP_WRITE_REGISTERS = "target_temp_write_registers"
CONF_FAN_MODE_REGISTER = "fan_mode_register"
CONF_FAN_MODE_ON = "state_fan_on"
@@ -181,4 +185,7 @@ LIGHT_MODBUS_SCALE_MIN = 0
LIGHT_MODBUS_SCALE_MAX = 100
LIGHT_MODBUS_INVALID_VALUE = 0xFFFF
DEFAULT_SCALE = 1.0
DEFAULT_OFFSET = 0
_LOGGER = logging.getLogger(__package__)

View File

@@ -17,7 +17,6 @@ from homeassistant.const import (
CONF_DELAY,
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_OFFSET,
CONF_SCAN_INTERVAL,
CONF_SLAVE,
CONF_STRUCTURE,
@@ -50,7 +49,6 @@ from .const import (
CONF_MIN_VALUE,
CONF_NAN_VALUE,
CONF_PRECISION,
CONF_SCALE,
CONF_SLAVE_COUNT,
CONF_STATE_OFF,
CONF_STATE_ON,
@@ -62,6 +60,8 @@ from .const import (
CONF_VIRTUAL_COUNT,
CONF_WRITE_TYPE,
CONF_ZERO_SUPPRESS,
DEFAULT_OFFSET,
DEFAULT_SCALE,
SIGNAL_STOP_ENTITY,
DataType,
)
@@ -163,8 +163,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
self._swap = config[CONF_SWAP]
self._data_type = config[CONF_DATA_TYPE]
self._structure: str = config[CONF_STRUCTURE]
self._scale = config[CONF_SCALE]
self._offset = config[CONF_OFFSET]
self._slave_count = config.get(CONF_SLAVE_COUNT) or config.get(
CONF_VIRTUAL_COUNT, 0
)
@@ -181,8 +179,6 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
self._precision = config.get(CONF_PRECISION, 2)
else:
self._precision = config.get(CONF_PRECISION, 0)
if self._precision > 0 or self._scale != int(self._scale):
self._value_is_int = False
def _swap_registers(self, registers: list[int], slave_count: int) -> list[int]:
"""Do swap as needed."""
@@ -206,7 +202,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
registers.reverse()
return registers
def __process_raw_value(self, entry: float | str | bytes) -> str | None:
def __process_raw_value(
self,
entry: float | bytes,
scale: float = DEFAULT_SCALE,
offset: float = DEFAULT_OFFSET,
) -> str | None:
"""Process value from sensor with NaN handling, scaling, offset, min/max etc."""
if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
return None
@@ -215,7 +216,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
if entry != entry: # noqa: PLR0124
# NaN float detected, replace with None
return None
val: float | int = self._scale * entry + self._offset
val: float | int = scale * entry + offset
if self._min_value is not None and val < self._min_value:
val = self._min_value
if self._max_value is not None and val > self._max_value:
@@ -226,7 +227,12 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
return str(round(val))
return f"{float(val):.{self._precision}f}"
def unpack_structure_result(self, registers: list[int]) -> str | None:
def unpack_structure_result(
self,
registers: list[int],
scale: float = DEFAULT_SCALE,
offset: float = DEFAULT_OFFSET,
) -> str | None:
"""Convert registers to proper result."""
if self._swap:
@@ -250,7 +256,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
# Apply scale, precision, limits to floats and ints
v_result = []
for entry in val:
v_temp = self.__process_raw_value(entry)
v_temp = self.__process_raw_value(entry, scale, offset)
if self._data_type != DataType.CUSTOM:
v_result.append(str(v_temp))
else:
@@ -258,7 +264,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
return ",".join(map(str, v_result))
# Apply scale, precision, limits to floats and ints
return self.__process_raw_value(val[0])
return self.__process_raw_value(val[0], scale, offset)
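As a worked example of the per-call scale and offset: with scale 0.1, offset -5 and precision 1, a raw register value of 215 becomes 16.5. A tiny standalone version of that arithmetic, with the NaN handling and min/max clamping omitted:
def process(entry: float, scale: float, offset: float, precision: int) -> str:
    # val = scale * entry + offset, formatted to the configured precision
    val = scale * entry + offset
    return f"{val:.{precision}f}"

print(process(215, 0.1, -5, 1))  # 16.5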
class ModbusToggleEntity(ModbusBaseEntity, ToggleEntity, RestoreEntity):

View File

@@ -12,6 +12,7 @@ from homeassistant.components.sensor import (
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_NAME,
CONF_OFFSET,
CONF_SENSORS,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
@@ -25,7 +26,14 @@ from homeassistant.helpers.update_coordinator import (
)
from . import get_hub
from .const import _LOGGER, CONF_SLAVE_COUNT, CONF_VIRTUAL_COUNT
from .const import (
_LOGGER,
CONF_SCALE,
CONF_SLAVE_COUNT,
CONF_VIRTUAL_COUNT,
DEFAULT_OFFSET,
DEFAULT_SCALE,
)
from .entity import ModbusStructEntity
from .modbus import ModbusHub
@@ -73,9 +81,13 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
self._coordinator: DataUpdateCoordinator[list[float | None] | None] | None = (
None
)
self._scale = entry.get(CONF_SCALE, DEFAULT_SCALE)
self._offset = entry.get(CONF_OFFSET, DEFAULT_OFFSET)
self._attr_native_unit_of_measurement = entry.get(CONF_UNIT_OF_MEASUREMENT)
self._attr_state_class = entry.get(CONF_STATE_CLASS)
self._attr_device_class = entry.get(CONF_DEVICE_CLASS)
if self._precision > 0 or self._scale != int(self._scale):
self._value_is_int = False
async def async_setup_slaves(
self, hass: HomeAssistant, slave_count: int, entry: dict[str, Any]
@@ -117,7 +129,9 @@ class ModbusRegisterSensor(ModbusStructEntity, RestoreSensor, SensorEntity):
self.async_write_ha_state()
return
self._attr_available = True
result = self.unpack_structure_result(raw_result.registers)
result = self.unpack_structure_result(
raw_result.registers, self._scale, self._offset
)
if self._coordinator:
result_array: list[float | None] = []
if result:

View File

@@ -15,6 +15,7 @@ from homeassistant.const import (
CONF_COUNT,
CONF_HOST,
CONF_NAME,
CONF_OFFSET,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_STRUCTURE,
@@ -25,16 +26,23 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from .const import (
CONF_CURRENT_TEMP_OFFSET,
CONF_CURRENT_TEMP_SCALE,
CONF_DATA_TYPE,
CONF_FAN_MODE_VALUES,
CONF_SCALE,
CONF_SLAVE_COUNT,
CONF_SWAP,
CONF_SWAP_BYTE,
CONF_SWAP_WORD,
CONF_SWAP_WORD_BYTE,
CONF_SWING_MODE_VALUES,
CONF_TARGET_TEMP_OFFSET,
CONF_TARGET_TEMP_SCALE,
CONF_VIRTUAL_COUNT,
DEFAULT_HUB,
DEFAULT_OFFSET,
DEFAULT_SCALE,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
PLATFORMS,
@@ -243,6 +251,46 @@ def duplicate_fan_mode_validator(config: dict[str, Any]) -> dict:
return config
def not_zero_value(val: float, errMsg: str) -> float:
"""Check value is not zero."""
if val == 0:
raise vol.Invalid(errMsg)
return val
def ensure_and_check_conflicting_scales_and_offsets(config: dict[str, Any]) -> dict:
"""Check for conflicts in scale/offset and ensure target/current temp scale/offset is set."""
config_keys = [
(CONF_SCALE, CONF_TARGET_TEMP_SCALE, CONF_CURRENT_TEMP_SCALE, DEFAULT_SCALE),
(
CONF_OFFSET,
CONF_TARGET_TEMP_OFFSET,
CONF_CURRENT_TEMP_OFFSET,
DEFAULT_OFFSET,
),
]
for generic_key, target_key, current_key, default_value in config_keys:
if generic_key in config and (target_key in config or current_key in config):
raise vol.Invalid(
f"Cannot use both '{generic_key}' and temperature-specific parameters "
f"('{target_key}' or '{current_key}') in the same configuration. "
f"Either the '{generic_key}' parameter (which applies to both temperatures) "
"or the new temperature-specific parameters, but not both."
)
if generic_key in config:
value = config.pop(generic_key)
config[target_key] = value
config[current_key] = value
if target_key not in config:
config[target_key] = default_value
if current_key not in config:
config[current_key] = default_value
return config
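To illustrate what this validator does: a generic scale or offset is fanned out to both temperature-specific keys, missing keys fall back to the defaults, and mixing generic with specific keys is rejected. A standalone sketch with plain string keys standing in for the CONF_* constants:
import voluptuous as vol

def expand(config: dict) -> dict:
    keys = [
        ("scale", "target_temp_scale", "current_temp_scale", 1.0),
        ("offset", "target_temp_offset", "current_temp_offset", 0),
    ]
    for generic, target, current, default in keys:
        if generic in config and (target in config or current in config):
            raise vol.Invalid(f"'{generic}' conflicts with '{target}'/'{current}'")
        if generic in config:
            config[target] = config[current] = config.pop(generic)
        config.setdefault(target, default)
        config.setdefault(current, default)
    return config

print(expand({"scale": 0.1}))
# {'target_temp_scale': 0.1, 'current_temp_scale': 0.1, 'target_temp_offset': 0, 'current_temp_offset': 0}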
def duplicate_swing_mode_validator(config: dict[str, Any]) -> dict:
"""Control modbus climate swing mode values for duplicates."""
swing_modes: set[int] = set()

View File

@@ -26,8 +26,8 @@ from homeassistant.helpers.issue_registry import (
async_delete_issue,
)
from .actions import get_music_assistant_client, register_actions
from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, DOMAIN, LOGGER
from .services import get_music_assistant_client, register_actions
if TYPE_CHECKING:
from music_assistant_models.event import MassEvent
@@ -238,12 +238,14 @@ async def _client_listen(
hass.async_create_task(hass.config_entries.async_reload(entry.entry_id))
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, entry: MusicAssistantConfigEntry
) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
mass_entry_data: MusicAssistantEntryData = entry.runtime_data
mass_entry_data = entry.runtime_data
mass_entry_data.listen_task.cancel()
await mass_entry_data.mass.disconnect()
@@ -251,7 +253,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_remove_config_entry_device(
hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
hass: HomeAssistant,
config_entry: MusicAssistantConfigEntry,
device_entry: dr.DeviceEntry,
) -> bool:
"""Remove a config entry from a device."""
player_id = next(

View File

@@ -115,6 +115,13 @@ QUEUE_OPTION_MAP = {
MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE,
}
REPEAT_MODE_MAPPING_TO_HA = {
MassRepeatMode.OFF: RepeatMode.OFF,
MassRepeatMode.ONE: RepeatMode.ONE,
MassRepeatMode.ALL: RepeatMode.ALL,
# UNKNOWN is intentionally not mapped - will return None
}
SERVICE_PLAY_MEDIA_ADVANCED = "play_media"
SERVICE_PLAY_ANNOUNCEMENT = "play_announcement"
SERVICE_TRANSFER_QUEUE = "transfer_queue"
@@ -657,7 +664,7 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity):
# player has an MA queue active (either its own queue or some group queue)
self._attr_app_id = DOMAIN
self._attr_shuffle = queue.shuffle_enabled
self._attr_repeat = queue.repeat_mode.value
self._attr_repeat = REPEAT_MODE_MAPPING_TO_HA.get(queue.repeat_mode)
if not (cur_item := queue.current_item):
# queue is empty
return

View File

@@ -19,7 +19,11 @@ from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL
from .coordinator import NASwebCoordinator
from .nasweb_data import NASwebData
PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.SWITCH]
PLATFORMS: list[Platform] = [
Platform.ALARM_CONTROL_PANEL,
Platform.SENSOR,
Platform.SWITCH,
]
NASWEB_CONFIG_URL = "https://{host}/page"

View File

@@ -0,0 +1,154 @@
"""Platform for NASweb alarms."""
from __future__ import annotations
import logging
import time
from webio_api import Zone as NASwebZone
from webio_api.const import STATE_ZONE_ALARM, STATE_ZONE_ARMED, STATE_ZONE_DISARMED
from homeassistant.components.alarm_control_panel import (
DOMAIN as DOMAIN_ALARM_CONTROL_PANEL,
AlarmControlPanelEntity,
AlarmControlPanelEntityFeature,
AlarmControlPanelState,
CodeFormat,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.helpers.update_coordinator import (
BaseCoordinatorEntity,
BaseDataUpdateCoordinatorProtocol,
)
from . import NASwebConfigEntry
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL
_LOGGER = logging.getLogger(__name__)
ALARM_CONTROL_PANEL_TRANSLATION_KEY = "zone"
NASWEB_STATE_TO_HA_STATE = {
STATE_ZONE_ALARM: AlarmControlPanelState.TRIGGERED,
STATE_ZONE_ARMED: AlarmControlPanelState.ARMED_AWAY,
STATE_ZONE_DISARMED: AlarmControlPanelState.DISARMED,
}
async def async_setup_entry(
hass: HomeAssistant,
config: NASwebConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up alarm control panel platform."""
coordinator = config.runtime_data
current_zones: set[int] = set()
@callback
def _check_entities() -> None:
received_zones: dict[int, NASwebZone] = {
entry.index: entry for entry in coordinator.webio_api.zones
}
added = {i for i in received_zones if i not in current_zones}
removed = {i for i in current_zones if i not in received_zones}
entities_to_add: list[ZoneEntity] = []
for index in added:
webio_zone = received_zones[index]
if not isinstance(webio_zone, NASwebZone):
_LOGGER.error("Cannot create ZoneEntity without NASwebZone")
continue
new_zone = ZoneEntity(coordinator, webio_zone)
entities_to_add.append(new_zone)
current_zones.add(index)
async_add_entities(entities_to_add)
entity_registry = er.async_get(hass)
for index in removed:
unique_id = f"{DOMAIN}.{config.unique_id}.zone.{index}"
if entity_id := entity_registry.async_get_entity_id(
DOMAIN_ALARM_CONTROL_PANEL, DOMAIN, unique_id
):
entity_registry.async_remove(entity_id)
current_zones.remove(index)
else:
_LOGGER.warning("Failed to remove old zone: no entity_id")
coordinator.async_add_listener(_check_entities)
_check_entities()
class ZoneEntity(AlarmControlPanelEntity, BaseCoordinatorEntity):
"""Entity representing NASweb zone."""
_attr_has_entity_name = True
_attr_should_poll = False
_attr_translation_key = ALARM_CONTROL_PANEL_TRANSLATION_KEY
def __init__(
self, coordinator: BaseDataUpdateCoordinatorProtocol, nasweb_zone: NASwebZone
) -> None:
"""Initialize zone entity."""
super().__init__(coordinator)
self._zone = nasweb_zone
self._attr_name = nasweb_zone.name
self._attr_translation_placeholders = {"index": f"{nasweb_zone.index:2d}"}
self._attr_unique_id = (
f"{DOMAIN}.{self._zone.webio_serial}.zone.{self._zone.index}"
)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._zone.webio_serial)},
)
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self._handle_coordinator_update()
def _set_attr_available(
self, entity_last_update: float, available: bool | None
) -> None:
if (
self.coordinator.last_update is None
or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
):
self._attr_available = False
else:
self._attr_available = available if available is not None else False
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._attr_alarm_state = NASWEB_STATE_TO_HA_STATE[self._zone.state]
if self._zone.pass_type == 0:
self._attr_code_format = CodeFormat.TEXT
elif self._zone.pass_type == 1:
self._attr_code_format = CodeFormat.NUMBER
else:
self._attr_code_format = None
self._attr_code_arm_required = self._attr_code_format is not None
self._set_attr_available(self._zone.last_update, self._zone.available)
self.async_write_ha_state()
async def async_update(self) -> None:
"""Update the entity.
Only used by the generic entity update service.
Scheduling updates is not necessary; the coordinator takes care of updates via push notifications.
"""
@property
def supported_features(self) -> AlarmControlPanelEntityFeature:
"""Return the list of supported features."""
return AlarmControlPanelEntityFeature.ARM_AWAY
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Arm away ZoneEntity."""
await self._zone.arm(code)
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Disarm ZoneEntity."""
await self._zone.disarm(code)

View File

@@ -23,6 +23,7 @@ _LOGGER = logging.getLogger(__name__)
KEY_INPUTS = "inputs"
KEY_OUTPUTS = "outputs"
KEY_ZONES = "zones"
class NotificationCoordinator:
@@ -103,6 +104,7 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(data)
@@ -197,5 +199,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(new_data)

View File

@@ -24,6 +24,11 @@
}
},
"entity": {
"alarm_control_panel": {
"zone": {
"name": "Zone {index}"
}
},
"sensor": {
"sensor_input": {
"name": "Input {index}",

View File

@@ -0,0 +1,76 @@
"""Support for Neato botvac connected vacuum cleaners."""
import logging
import aiohttp
from pybotvac import Account
from pybotvac.exceptions import NeatoException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_entry_oauth2_flow
from . import api
from .const import NEATO_DOMAIN, NEATO_LOGIN
from .hub import NeatoHub
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [
Platform.BUTTON,
Platform.CAMERA,
Platform.SENSOR,
Platform.SWITCH,
Platform.VACUUM,
]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up config entry."""
hass.data.setdefault(NEATO_DOMAIN, {})
if CONF_TOKEN not in entry.data:
raise ConfigEntryAuthFailed
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
try:
await session.async_ensure_token_valid()
except aiohttp.ClientResponseError as ex:
_LOGGER.debug("API error: %s (%s)", ex.code, ex.message)
if ex.code in (401, 403):
raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from ex
raise ConfigEntryNotReady from ex
neato_session = api.ConfigEntryAuth(hass, entry, implementation)
hass.data[NEATO_DOMAIN][entry.entry_id] = neato_session
hub = NeatoHub(hass, Account(neato_session))
await hub.async_update_entry_unique_id(entry)
try:
await hass.async_add_executor_job(hub.update_robots)
except NeatoException as ex:
_LOGGER.debug("Failed to connect to Neato API")
raise ConfigEntryNotReady from ex
hass.data[NEATO_LOGIN] = hub
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[NEATO_DOMAIN].pop(entry.entry_id)
return unload_ok

View File

@@ -0,0 +1,58 @@
"""API for Neato Botvac bound to Home Assistant OAuth."""
from __future__ import annotations
from asyncio import run_coroutine_threadsafe
from typing import Any
import pybotvac
from homeassistant import config_entries, core
from homeassistant.components.application_credentials import AuthImplementation
from homeassistant.helpers import config_entry_oauth2_flow
class ConfigEntryAuth(pybotvac.OAuthSession): # type: ignore[misc]
"""Provide Neato Botvac authentication tied to an OAuth2 based config entry."""
def __init__(
self,
hass: core.HomeAssistant,
config_entry: config_entries.ConfigEntry,
implementation: config_entry_oauth2_flow.AbstractOAuth2Implementation,
) -> None:
"""Initialize Neato Botvac Auth."""
self.hass = hass
self.session = config_entry_oauth2_flow.OAuth2Session(
hass, config_entry, implementation
)
super().__init__(self.session.token, vendor=pybotvac.Neato())
def refresh_tokens(self) -> str:
"""Refresh and return new Neato Botvac tokens."""
run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self.hass.loop
).result()
return self.session.token["access_token"] # type: ignore[no-any-return]
class NeatoImplementation(AuthImplementation):
"""Neato implementation of LocalOAuth2Implementation.
We need this class because we have to add client_secret
and scope to the authorization request.
"""
@property
def extra_authorize_data(self) -> dict[str, Any]:
"""Extra data that needs to be appended to the authorize url."""
return {"client_secret": self.client_secret}
async def async_generate_authorize_url(self, flow_id: str) -> str:
"""Generate a url for the user to authorize.
We must make sure that the plus signs are not encoded.
"""
url = await super().async_generate_authorize_url(flow_id)
return f"{url}&scope=public_profile+control_robots+maps"

View File

@@ -0,0 +1,28 @@
"""Application credentials platform for neato."""
from pybotvac import Neato
from homeassistant.components.application_credentials import (
AuthorizationServer,
ClientCredential,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from . import api
async def async_get_auth_implementation(
hass: HomeAssistant, auth_domain: str, credential: ClientCredential
) -> config_entry_oauth2_flow.AbstractOAuth2Implementation:
"""Return auth implementation for a custom auth implementation."""
vendor = Neato()
return api.NeatoImplementation(
hass,
auth_domain,
credential,
AuthorizationServer(
authorize_url=vendor.auth_endpoint,
token_url=vendor.token_endpoint,
),
)

View File

@@ -0,0 +1,44 @@
"""Support for Neato buttons."""
from __future__ import annotations
from pybotvac import Robot
from homeassistant.components.button import ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import NEATO_ROBOTS
from .entity import NeatoEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Neato button from config entry."""
entities = [NeatoDismissAlertButton(robot) for robot in hass.data[NEATO_ROBOTS]]
async_add_entities(entities, True)
class NeatoDismissAlertButton(NeatoEntity, ButtonEntity):
"""Representation of a dismiss_alert button entity."""
_attr_translation_key = "dismiss_alert"
_attr_entity_category = EntityCategory.CONFIG
def __init__(
self,
robot: Robot,
) -> None:
"""Initialize a dismiss_alert Neato button entity."""
super().__init__(robot)
self._attr_unique_id = f"{robot.serial}_dismiss_alert"
async def async_press(self) -> None:
"""Press the button."""
await self.hass.async_add_executor_job(self.robot.dismiss_current_alert)

Some files were not shown because too many files have changed in this diff.