Compare commits

...

507 Commits

Author SHA1 Message Date
Paulus Schoutsen
176f9c9f94 Add decorator to define Python tools from Python functions 2025-08-17 20:59:10 +00:00
Pete Sage
9f17a8a943 Add tests and improve error handling for Sonos update_alarm service call (#150715) 2025-08-17 22:47:45 +02:00
Joost Lekkerkerker
b44c47cd80 Removing myself as codeowner of Enphase (#150811) 2025-08-17 22:35:23 +02:00
Joost Lekkerkerker
e80c090932 Pin gql to 3.5.3 (#150800) 2025-08-17 19:27:17 +02:00
Pete Sage
ff418f513a Add dialog mode select for Sonos Arc Ultra soundbar (#150637)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-17 17:15:29 +02:00
Joost Lekkerkerker
b222cc5889 Use lifecycle hook instead of storing callback in starline (#150707) 2025-08-17 17:08:35 +02:00
Yuxin Wang
db1707fd72 Mark config-flow-test-coverage as done in APCUPSD quality scale (#150733) 2025-08-17 17:08:25 +02:00
peteS-UK
6f6f5809d0 Fix volume step error in Squeezebox media player (#150760) 2025-08-17 17:07:23 +02:00
Aidan Timson
1f43f82ea6 Update systembridgeconnector to 4.1.10 (#150736) 2025-08-17 17:03:46 +02:00
Yevhenii Vaskivskyi
942274234e Add asusrouter logger definition to asuswrt (#150747) 2025-08-17 16:59:02 +02:00
Maciej Bieniek
f03955b773 NextDNS tests improvements (#150791) 2025-08-17 16:56:25 +02:00
jan iversen
27ac375183 Remove unused strings in modbus (#150795) 2025-08-17 16:21:28 +02:00
Jamin
c951728767 VOIP RTP cleanup (#150490) 2025-08-17 16:16:20 +02:00
Paulus Schoutsen
3496494290 Remove filters from device analytics payload (#150771) 2025-08-17 16:15:02 +02:00
jan iversen
e90183391e Modbus: Delay start after connection is made. (#150526) 2025-08-17 16:09:24 +02:00
Joost Lekkerkerker
90558c517b Add info to Bravia device (#150690) 2025-08-17 15:30:46 +02:00
epenet
7fba94747e Add Tuya test fixtures (#150793) 2025-08-17 14:05:58 +02:00
Thomas Schamm
3b4b478afa Fix for bosch_shc: 'device_registry.async_get_or_create' referencing a non existing 'via_device' (#150756) 2025-08-17 10:49:04 +02:00
Joakim Plate
a3640c5664 feat: switch to model id for togrill (#150750) 2025-08-17 06:30:05 +02:00
Michael
246a181ad4 Fix restrict-task-creation workflow (#150774) 2025-08-17 01:56:56 +02:00
Thomas Schamm
d642ecb302 Bump boschshcpy to 0.2.107 (#150754) 2025-08-17 00:37:44 +02:00
Yevhenii Vaskivskyi
53889165b5 Bump asusrouter to 1.19.0 (#150742) 2025-08-16 21:32:27 +02:00
Marc Mueller
fe32e74910 Update charset-normalizer to 3.4.3 (#150770) 2025-08-16 21:31:14 +02:00
dontinelli
a71ae4db37 Add min/max values as extra attributes for measurements for fyta (#150562) 2025-08-16 20:49:55 +02:00
Marc Mueller
0d5ebdb692 Update hassfest package exceptions (#150744) 2025-08-16 12:52:26 +02:00
Denis Shulyaka
80e720f663 Add external tools support for chat log (#150461) 2025-08-16 12:20:20 +02:00
epenet
616b031df8 Use constants in Tuya tests (#150739) 2025-08-16 11:00:08 +02:00
Tom
bcdece4455 Add additional sensors to airOS (#150712) 2025-08-16 08:43:47 +02:00
Joost Lekkerkerker
1aa3efaf8a Add support for fineDustSensor capability in SmartThings (#150714) 2025-08-16 08:41:28 +02:00
Luke Lashley
7f16b11776 Improve roborock resume cleaning logic (#150726) 2025-08-16 08:40:46 +02:00
Maciej Bieniek
078b7224fc Add "bypass age verification" switch to NextDNS integration (#150716) 2025-08-15 21:46:06 +03:00
Denis Shulyaka
d5970e7733 Anthropic thinking content (#150341) 2025-08-15 15:52:36 +02:00
Joost Lekkerkerker
d5a74892e6 Remove unnecessary hass assignment in coordinators (#150696)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-15 15:52:13 +02:00
Joost Lekkerkerker
793a829236 Add serial number to Vodafone Station device (#150709) 2025-08-15 15:52:01 +02:00
G Johansson
7670146faf Improve handling decode errors in rest (#150699) 2025-08-15 15:51:48 +02:00
Nick Kuiper
eaedefe105 Update bluecurrent-api to 1.3.1 (#150559) 2025-08-15 15:45:40 +02:00
Marc Mueller
4f20776e0e Add check for dependency package names in hassfest (#150630) 2025-08-15 15:44:47 +02:00
epenet
6c21a14be4 Add binary sensor to 1-Wire DS2405 (#150679) 2025-08-15 15:37:34 +02:00
Alex Thompson
9015743483 Bump tilt-ble to 0.3.1 (#150711) 2025-08-15 15:37:08 +02:00
Thomas D
2a62e033dd Add binary sensor platform to qbus integration (#149975) 2025-08-15 15:35:51 +02:00
Joost Lekkerkerker
f72f2a326a Add MAC address to Modern forms devices (#150698) 2025-08-15 15:34:31 +02:00
Joost Lekkerkerker
61de50dfc0 Add hw_version to Point device (#150704) 2025-08-15 15:34:10 +02:00
Joost Lekkerkerker
ef7ed026db Add serial number to Ondilo ICO (#150702) 2025-08-15 15:33:13 +02:00
Joost Lekkerkerker
abdb48e7ce Add serial number to Nobo hub devices (#150700) 2025-08-15 15:32:43 +02:00
Joost Lekkerkerker
9646aa232a Add serial number to Zeversolar device (#150710) 2025-08-15 15:31:29 +02:00
Joost Lekkerkerker
635cfe7d17 Remove hass assignment in Openhome (#150703) 2025-08-15 15:30:01 +02:00
Joost Lekkerkerker
1e2f7cadc7 Add unregister hook to Vera (#150708) 2025-08-15 15:27:49 +02:00
Tom
94e9f32da5 Bump airOS to 0.3.0 (#150693) 2025-08-15 15:24:23 +02:00
Maciej Bieniek
b7ba99ed17 Bump nextdns to version 4.1.0 (#150706) 2025-08-15 15:24:05 +02:00
Joost Lekkerkerker
ebbeef8021 Add mac to Ambient station device (#150689) 2025-08-15 15:15:22 +02:00
Joost Lekkerkerker
8da75490c0 Add hw_version to RainMachine device (#150705) 2025-08-15 15:04:59 +02:00
Joost Lekkerkerker
bc89e8fd3c Move Notion hardware revision to hw_version (#150701) 2025-08-15 15:03:30 +02:00
Joost Lekkerkerker
602497904b Set firmware version to the right field in Guardian (#150697) 2025-08-15 15:01:42 +02:00
G Johansson
facf217b99 Fix missing labels for subdiv in workday (#150684) 2025-08-15 13:59:35 +02:00
Joost Lekkerkerker
b300654e15 Add serial number to Dremel device (#150691) 2025-08-15 13:58:44 +02:00
Joost Lekkerkerker
a742125f13 Add serial number to Emonitor device (#150692) 2025-08-15 13:58:23 +02:00
Thomas D
64768b1036 Fix re-auth flow for Volvo integration (#150478) 2025-08-15 13:58:03 +02:00
Petro31
792bb5781d Fix optimistic set to false for template entities (#150421) 2025-08-15 13:53:48 +02:00
Jan Bouwhuis
7bd126dc8e Assert the MQTT config entry is reloaded on subentry creation and mutation (#150636) 2025-08-15 13:04:12 +02:00
Joakim Sørensen
83ee380b17 Bump hass-nabucasa from 0.111.2 to 1.0.0 and refactor related code (#150566) 2025-08-15 11:35:52 +02:00
Ludovic BOUÉ
58f8b3c401 Bump Python Matter server to 8.1.0 (#150631) 2025-08-15 11:29:49 +02:00
Marc Mueller
2a6d1180f4 Update py-madvr2 to 1.6.40 (#150647) 2025-08-15 08:13:22 +02:00
J. Nick Koston
00b765893d Bump onvif-zeep-async to 4.0.3 (#150663) 2025-08-15 05:49:31 +02:00
karwosts
3e9e9b0489 Fix demo media_player.browse browsing (#150669) 2025-08-15 05:47:55 +02:00
Luke Lashley
25f7c02498 Bump python-snoo to 0.8.3 (#150670) 2025-08-15 05:46:59 +02:00
Manu
a785f3d509 Increase test coverage of Habitica (#150671) 2025-08-15 05:45:42 +02:00
J. Nick Koston
9f36b2dcde Bump protobuf to 6.32.0 (#150667) 2025-08-15 02:31:10 +02:00
Michael Hansen
57265ac648 Add fuzzy matching to default agent (#150595) 2025-08-14 16:28:42 -05:00
J. Nick Koston
f5fe53a67f Bump uiprotect to 7.21.1 (#150657) 2025-08-14 16:16:04 -05:00
Arie Catsman
7e6ceee9d1 Add IQ Meter Collar and C6 Combiner to enphase_envoy integration (#150649) 2025-08-14 15:34:37 -05:00
DeerMaximum
9c21965a34 Add diagnostics to NINA (#150638) 2025-08-14 19:57:33 +02:00
rwrozelle
1ea740d81c Add media_player add off on capability to esphome (#147990) 2025-08-14 12:07:01 -05:00
rwrozelle
6e98446523 Media player API enumeration alignment and feature flags (#149597)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-08-14 11:24:43 -05:00
Ludovic BOUÉ
2248584a0f Add Matter Electrical measurements additional attributes (#150188)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-14 17:07:18 +02:00
Martin Hjelmare
d9b6f82639 Add Z-Wave Fortrezz SSA2 discovery (#150629) 2025-08-14 17:37:44 +03:00
DeerMaximum
3eecfa8e57 Set PARALLEL_UPDATES in NINA (#150635) 2025-08-14 16:36:04 +02:00
epenet
382e7dfd39 Add Tuya test fixtures (#150622) 2025-08-14 14:51:43 +02:00
Joost Lekkerkerker
5358c89bfd Add fixtures for one door refrigerator in SmartThings (#150632) 2025-08-14 14:51:20 +02:00
Tom
e6103fdcf4 Bump airOS to 0.2.11 (#150627) 2025-08-14 13:43:32 +02:00
Martin Dybal
02dca5f0ad Fix type annotation for climate _attr_current_humidity (#150615) 2025-08-14 12:55:54 +02:00
Ludovic BOUÉ
cc4b9e0eca Extend UnitOfReactivePower with 'mvar' (#150415) 2025-08-14 11:46:06 +02:00
Joost Lekkerkerker
7e28e3dcd3 Add sw_version to JustNimbus device (#150592) 2025-08-14 09:31:43 +02:00
Joakim Plate
bb3d571887 Make sure we update the api version in philips_js discovery (#150604) 2025-08-14 09:30:47 +02:00
Joakim Plate
5a789cbbc8 Bump togrill to 0.7.0 in preperation for number (#150611) 2025-08-14 09:30:02 +02:00
dependabot[bot]
4954c2a84b Bump actions/ai-inference from 1.2.8 to 2.0.0 (#150619)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-14 09:27:18 +02:00
G Johansson
f28e9f60ee Use runtime_data in pvpc_hourly_pricing (#150565) 2025-08-14 01:03:04 +02:00
Luke Lashley
6a4bf4ec72 Bump python-snoo to 0.8.2 (#150569) 2025-08-14 00:12:18 +02:00
Luke Lashley
12706178c2 Change Snoo to use MQTT instead of PubNub (#150570) 2025-08-14 00:11:52 +02:00
Samuel Xiao
ed39b18d94 Add cover platform for switchbot cloud (#148993) 2025-08-14 00:10:19 +02:00
G Johansson
9999807891 Use OptionsFlowWithReload in coinbase (#150587) 2025-08-13 23:48:20 +02:00
Arie Catsman
b5db0e98b4 Bump pyenphase to 2.3.0 (#150600) 2025-08-13 23:44:07 +02:00
Åke Strandberg
f58b2177a2 Bump pymiele to 0.5.4 (#150605) 2025-08-13 23:42:47 +02:00
G Johansson
4f64014816 Add wind gust sensor to OpenWeatherMap (#150607) 2025-08-13 23:34:12 +02:00
Michael Hansen
cf68214c4d Bump hassil to 3.1.0 (#150584) 2025-08-13 20:58:57 +02:00
Marc Mueller
b3d3284f5c Update types packages (#150586) 2025-08-13 20:55:22 +02:00
Marc Mueller
12c346f550 Update orjson to 3.11.2 (#150588) 2025-08-13 20:53:55 +02:00
HarvsG
bda82e19a5 Pi_hole - Account for auth succeeding when it shouldn't (#150413) 2025-08-13 20:53:21 +02:00
Marc Mueller
f7726a7563 Update pre-commit-hooks to 6.0.0 (#150583) 2025-08-13 19:23:26 +02:00
Michael Hansen
2c0ed2cbfe Add intent for setting fan speed (#150576) 2025-08-13 18:57:25 +02:00
Marc Mueller
13376ef896 Fix RuntimeWarning in asuswrt tests (#150580) 2025-08-13 18:33:02 +02:00
Marc Mueller
d18cc3d6c3 Fix RuntimeWarning in squeezebox tests (#150582) 2025-08-13 18:32:50 +02:00
karwosts
b40aab479a Change monetary translation to 'Monetary balance' (#150054) 2025-08-13 17:21:36 +02:00
Michael Hansen
721f9a40d8 Add volume up/down intents for media players (#150443) 2025-08-13 09:35:37 -05:00
Ludovic BOUÉ
eb4b75a9a7 Extend UnitOfApparentPower with 'mVA' (#150422)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-13 15:56:04 +02:00
epenet
b40f381164 Add Tuya test fixture (#150557) 2025-08-13 14:09:19 +02:00
epenet
51413b7a8d Ensure Tuya fans have at least one valid DPCode (#150550) 2025-08-13 13:40:11 +02:00
Foscam-wangzhengyu
ff694a0058 Foscam Add prompt language and modify the default port to a more compatible (#150536) 2025-08-13 13:21:39 +02:00
Joakim Sørensen
eea04558a9 Move alexa access token updates to new handler (#150466)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-13 13:21:28 +02:00
starkillerOG
5ad2a27918 Use camera name in Reolink tests (#150555) 2025-08-13 13:06:12 +02:00
G Johansson
f39305f64e Remove deprecated json helper constants and function (#150111) 2025-08-13 12:42:00 +02:00
karwosts
7fba0ca2c0 Add 'all' option to light/switch group config flow (#149671) 2025-08-13 12:34:58 +02:00
Pete Sage
51fbccd125 Fix Sonos CI issue part 2 (#150529)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-08-13 12:26:24 +02:00
G Johansson
5fc2e6ed53 Add async_update_reload_and_abort to config entry subentries (#149768)
Co-authored-by: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com>
2025-08-13 11:59:37 +02:00
Christopher Fenner
5a7f7d90a0 move Volvo car connection status sensor to diagnostic section (#150487) 2025-08-13 11:45:05 +02:00
Luke Lashley
6d34d34ce1 Bump python-snoo to 0.8.1 (#150530) 2025-08-13 11:38:18 +02:00
dependabot[bot]
6454f40c3c Bump github/codeql-action from 3.29.8 to 3.29.9 (#150539)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-13 11:36:43 +02:00
epenet
53e40a6b8c Ensure Tuya humidifiers have at least one valid DPCode (#150546) 2025-08-13 11:25:59 +02:00
J. Nick Koston
8a54a1d95c Bump aioesphomeapi to 39.0.0 (#150523) 2025-08-13 03:17:20 -05:00
Yevhenii Vaskivskyi
8a52e9ca01 Bump asusrouter to 1.18.2 (#150541) 2025-08-13 10:46:08 +03:00
Robert Resch
d9ca253c6c Bump uv to 0.8.9 (#150542) 2025-08-13 09:45:54 +02:00
Pete Sage
b7853ea9bd Fix Sonos CI Issue (#150518)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 23:18:59 +02:00
J. Nick Koston
d19e410ea8 Bump aiodhcpwatcher to 1.2.1 (#150519) 2025-08-12 22:07:25 +02:00
Shay Levy
83f911e4ff Bump aiowebostv to 0.7.5 (#150514) 2025-08-12 22:53:56 +03:00
jan iversen
452322e971 Modbus: Do not remove non-duplicate error log. (#150511) 2025-08-12 21:16:43 +02:00
Manu
6fa7c6cb81 Add party to Habitica (#149608) 2025-08-12 20:51:12 +02:00
J. Nick Koston
ed6072d46b Bump bleak-retry-connector to 4.0.1 (#150515) 2025-08-12 20:49:43 +02:00
Yevhenii Vaskivskyi
9fdc632780 Switch asuswrt http(s) library to asusrouter package (#150426) 2025-08-12 20:45:39 +02:00
Norbert Rittel
4d426c31f9 Fix missing sentence-case in hydrawise (#150513) 2025-08-12 20:10:43 +02:00
jan iversen
ea946c90b3 Modbus: Cancel connect background task if stopping/restarting. (#150507) 2025-08-12 19:38:17 +02:00
Tom
fb68b2d454 Bump airOS to 0.2.8 (#150504) 2025-08-12 19:27:27 +02:00
Ludovic BOUÉ
2ebe0a929e Matter SmokeCoAlarm SelfTestRequest (#150497) 2025-08-12 19:10:55 +02:00
Manu
c1e5a7efc9 Add icons to Sleep as Android sensor entities (#150451) 2025-08-12 18:23:27 +02:00
dependabot[bot]
561ef7015c Bump actions/checkout from 4.2.2 to 5.0.0 (#150494)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-12 18:17:44 +02:00
Robin Lintermann
b4270e019e Bump pysmarlaapi to 0.9.2 (#150496) 2025-08-12 18:14:32 +02:00
Joost Lekkerkerker
614bf96fb9 Add model_id to Philips Hue (#150499) 2025-08-12 18:09:14 +02:00
Tucker Kern
ca290ee631 Implement Snapcast grouping with standard HA actions (#146855)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 18:07:29 +02:00
epenet
ad3174f6e6 Rename Tuya parsing models (#150498) 2025-08-12 18:02:26 +02:00
jan iversen
218b0738ca Modbus: Remove wrong comment on non-existing parameter. (#150501) 2025-08-12 18:00:51 +02:00
Joakim Plate
98e6e20079 Mock habluetooth adapters (#148919) 2025-08-12 10:46:31 -05:00
Norbert Rittel
89aa349881 Fix spelling of "an HS color command" in template (#150495) 2025-08-12 17:18:27 +02:00
wedsa5
07930b12d0 Fix brightness command not sent when in white color mode (#150439)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 16:36:52 +02:00
Renat Sibgatulin
711afa306c Add number platform for LED brightness to air-Q (#150492) 2025-08-12 15:39:28 +02:00
epenet
a3904ce60c Sort Tuya DPCodes alphabetically (#150477) 2025-08-12 15:28:42 +02:00
hanwg
455cf2fb42 Add notify platform for Telegram bot (#149853)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 15:24:13 +02:00
Aarni Koskela
072ae2b955 ruuvitag_ble: add new sensors (#150435)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-12 14:19:15 +02:00
epenet
2b70639b11 Add device registry snapshots to Tuya (#150482)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 14:04:36 +02:00
Åke Strandberg
2612dbeb9b Add missing boost2 code for Miele hobs (#150481) 2025-08-12 13:58:38 +02:00
Matrix
7ebdd24224 Bump yolink api to 0.5.8 (#150480) 2025-08-12 13:55:04 +02:00
Martin Hjelmare
66ff1cf005 Improve Z-Wave manual config flow step description (#150479) 2025-08-12 13:47:11 +02:00
David
08aae4bf49 Fix error of the Powerfox integration in combination with the new Powerfox FLOW adapter (#150429)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 12:45:21 +02:00
Etienne C.
313b5a483c Remove rounding of Waze duration sensor (#150424) 2025-08-12 12:20:48 +02:00
Arie Catsman
8edbcc92d3 Fix enphase_envoy non existing via device warning at first config. (#149010)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 11:55:43 +02:00
peteS-UK
067cab71fa Additional Fix error on startup when no Apps or Radio plugins are installed for Squeezebox (#150475) 2025-08-12 11:55:21 +02:00
Nippey
596e4883b1 Add more sensors to Tuya weather station (#150442)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 11:33:51 +02:00
yufeng
fb4a452872 Add supply frequency sensors to Tuya energy monitoring devices (#149320)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 11:02:03 +02:00
yufeng
5b232226e9 Add timers and switches to Tuya irrigation systems (#149236) 2025-08-12 10:53:08 +02:00
J. Nick Koston
db81610983 Bump aioesphomeapi to 38.2.1 (#150455) 2025-08-12 10:46:53 +02:00
epenet
8f5c8caf07 Add mute switch to Tuya smoke detectors (#150469) 2025-08-12 10:45:39 +02:00
Matrix
f6af524ddf Fix YoLink valve state when device running in class A mode (#150456) 2025-08-12 10:42:40 +02:00
Norbert Rittel
e0a8c9b458 Fix missing sentence-casing in somfy_mylink (#150463) 2025-08-12 10:30:38 +02:00
Cyrill Raccaud
c46412ee5b Bump cookidoo-api to 0.14.0 (#150450) 2025-08-12 09:51:39 +02:00
Mike Degatano
a06df2a680 Make disk_lifetime issue into a repair (#150140) 2025-08-12 08:39:37 +02:00
epenet
68fbcc8665 Add pymodbus to package constraints (#150419)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 00:50:05 +02:00
Manu
6cde5cfdcc Add diagnostics platform to Sleep as Android (#150447) 2025-08-11 23:47:07 +02:00
Wesley Vos
5605f5896a Remove the battery feature from supported features (#150101) 2025-08-11 23:26:27 +02:00
Manu
93c30f1b59 Add sensor platform to Sleep as Android (#150440) 2025-08-11 23:25:51 +02:00
Pete Sage
6e3ccbefc2 Add quality scale for Sonos (#144928) 2025-08-11 22:50:47 +02:00
Noah Husby
715dc12792 Add media browsing to Russound RIO (#148248) 2025-08-11 22:40:40 +02:00
Denis Shulyaka
9cae0e0acc OpenAI thinking content (#150340) 2025-08-11 22:28:36 +02:00
Kevin David
e13702d9b1 Bump python-snoo to 0.7.0 (#150434) 2025-08-11 22:25:41 +02:00
Tsvi Mostovicz
3b358df9e7 Jewish Calendar add coordinator (#141456)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-11 22:22:13 +02:00
Foscam-wangzhengyu
e394435d7c Add more Foscam switches (#147409)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-11 22:14:32 +02:00
Aarni Koskela
9e398ffc10 Bump to ruuvitag-ble==0.2.1 (#150436) 2025-08-11 22:05:44 +02:00
tdfountain
065a53a90d Add quality scale and set Platinum for NUT (#143269)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-08-11 21:27:33 +02:00
Manu
91f6b8e1fe Add Sleep as Android integration (#142569) 2025-08-11 20:03:37 +02:00
Michael Hansen
1a9d1a9649 Handle non-streaming TTS case correctly (#150218) 2025-08-11 11:47:29 -05:00
MB901
cb7c7767b5 Add model_id for Freebox integration (#150430) 2025-08-11 18:46:57 +02:00
CubeZ2mDeveloper
d02029143c Add SONOFF Dongle Lite MG21 discovery support in ZHA (#148813)
Co-authored-by: zetao.zheng <1050713479@qq.com>
2025-08-11 12:41:41 -04:00
Robin Lintermann
3eda687d30 Smarla integration sensor platform (#145748) 2025-08-11 17:08:07 +02:00
Jamie Magee
7688c367cc Remove coinbase v2 API support (#148387)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-11 16:58:36 +02:00
Manu
a1dc3f3eac Bump habiticalib to version 0.4.2 (#150417) 2025-08-11 15:51:22 +02:00
Martin Hjelmare
d135d08813 Lower Z-Wave firmware check delay (#150411) 2025-08-11 14:09:04 +02:00
Brett Adams
9595759fd1 Add stale device cleanup to Teslemetry (#144523) 2025-08-11 13:54:44 +02:00
Etienne C.
d54f979612 Add a coordinator to Waze Travel Time (#148585) 2025-08-11 13:20:18 +02:00
Paulus Schoutsen
531073acc0 Allow specifying multiple integrations (#150349) 2025-08-11 13:12:29 +02:00
Bouwe Westerdijk
73cbc962f9 Implement snapshot testing for Plugwise binary_sensor platform (#150375) 2025-08-11 13:11:24 +02:00
epenet
34b0b71375 Add Tuya snapshot tests for empty electricity RAW sensors (#150407) 2025-08-11 12:05:33 +02:00
Brett Adams
203c908730 Add charging and preconditioning actions to Teslemetry (#144184) 2025-08-11 11:59:39 +02:00
tronikos
23e6148d3b Create an issue if Opower utility is no longer supported (#150315) 2025-08-11 11:58:12 +02:00
epenet
2a5a66f9d5 Handle empty electricity RAW sensors in Tuya (#150406) 2025-08-11 11:55:47 +02:00
dependabot[bot]
84ce5d65e1 Bump github/codeql-action from 3.29.7 to 3.29.8 (#150405)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-11 11:50:00 +02:00
Bram Kragten
00c7838587 Update frontend to 20250811.0 (#150404) 2025-08-11 10:58:03 +02:00
Stefan Agner
d8b576c087 Rename local OAuth2 source (#150403) 2025-08-11 10:37:25 +02:00
Tomeroeni
330dce24c5 Bump aiounifi to version 86 (#150321) 2025-08-11 10:32:35 +02:00
karwosts
0089d3efa1 Support multiple for StateSelector (#146288) 2025-08-11 11:24:20 +03:00
Manuel Stahl
167e9c8f4a Update pystiebeleltron to 0.2.3 (#150339)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-11 09:43:09 +02:00
Manu
c7f5e25d41 Update quality scale to platinum 🏆️ for Uptime Kuma (#148951) 2025-08-10 23:36:57 +02:00
Florian von Garrel
7b5dd4a0ec Paperless-ngx: Disable entities by default and extended docs (#149473) 2025-08-10 23:36:36 +02:00
Denis Shulyaka
84de6aacfc Remove native field from conversation chatlog delta listeners (#150389) 2025-08-10 22:41:37 +02:00
epenet
9561c84920 Fix issue with Tuya suggested unit (#150394) 2025-08-10 22:39:00 +02:00
jan iversen
7572b2a669 Bump pymodbus to v3.11.1. (#150383) 2025-08-10 22:38:49 +02:00
dontinelli
b48409ab1b Add new sensors with battery data for solarlog (#150385)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-10 22:28:50 +02:00
Austin Mroczek
ab04e2c501 TotalConnect major test updates (#139672)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-10 22:26:43 +02:00
epenet
38e6a7c6d4 Add Tuya test fixtures (#150387) 2025-08-10 22:17:14 +02:00
Vincent Wolsink
c2b284de2d Add humidity (steamer) control to Huum (#150330)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-10 21:55:20 +02:00
epenet
b760bf342a Add frost protection and valve status to Tuya thermostats (#150177) 2025-08-10 21:37:14 +02:00
Yuxin Wang
79cfea3fea Use mock_setup_entry fixture for APCUPSD (#150392) 2025-08-10 21:35:47 +02:00
dontinelli
69ace08c01 Bump solarlog_cli to 0.5.0 (#150384) 2025-08-10 20:57:03 +02:00
Yuxin Wang
bf33e286d6 Add recovery test logic for connection failure for APCUPSD (#150382)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-10 20:32:25 +02:00
epenet
6b83effc5f Simplify DEVICE_MOCKS in Tuya (#150381) 2025-08-10 17:58:51 +02:00
epenet
2b158fe690 Add Tuya snapshots tests for kt category (air conditioner) (#150256) 2025-08-10 17:45:40 +02:00
epenet
712ddc03c8 Add Tuya snapshots tests for kj category (air purifier) (#150171) 2025-08-10 17:44:55 +02:00
epenet
efe519faad Add mute support to Tuya wg2 category (gateway) (#150122) 2025-08-10 17:44:26 +02:00
epenet
1b7cb418eb Add Tuya snapshots tests for cwysj category (pet water fountain) (#150121) 2025-08-10 17:44:00 +02:00
Yuxin Wang
c678bcd4f1 Split test_config_flow_duplicate tests into two separate ones for APCUPSD (#150379) 2025-08-10 17:35:48 +02:00
Marc Mueller
0eaea13e8d Update pylint to 3.3.8 + astroid to 3.3.11 (#150327) 2025-08-10 16:41:59 +02:00
Norbert Rittel
b1e4513f7d Capitalize "Ice Plus" as feature name in lg_thinq (#150370) 2025-08-10 15:14:40 +02:00
Steven Looman
6d7f8bb7d7 Remove unused string scan_interval in upnp component (#150372) 2025-08-10 15:14:14 +02:00
Norbert Rittel
b481aaba77 Fix wrong translation of unlock_inside_the_door in xiaomi_ble (#150371)
thanks
2025-08-10 11:45:24 +02:00
Alexandre CUER
d539f37aa4 Remove CONF_EXCLUDE_FEEDID constant from the emoncms integration (#150333) 2025-08-10 09:52:17 +02:00
J. Nick Koston
865b3a6646 Add raw advertisement data to Bluetooth WebSocket API (#150358) 2025-08-10 09:44:15 +02:00
Denis Shulyaka
1c603f968f Bump openai to 1.99.5 (#150342) 2025-08-10 09:41:55 +02:00
J. Nick Koston
d821d27730 Bump habluetooth to 5.0.1 (#150320) 2025-08-10 09:41:25 +02:00
tronikos
dfa060a7e1 Remove Mercury NZ Limited virtual integration (#150316) 2025-08-10 09:38:48 +02:00
Alexandre CUER
5262cca8e6 Use "device_id" instead of "slave" in modbus integration (#150200) 2025-08-10 09:31:26 +02:00
Yuxin Wang
2c36a74da5 Also test unique ID in config flow test for APCUPSD (#150362) 2025-08-10 07:49:25 +02:00
G Johansson
084cde6ecf Add base entity to workday (#150329) 2025-08-09 21:52:39 +02:00
Denis Shulyaka
3e34aa5fb7 Add thinking and native content to chatlog (#149699) 2025-08-09 15:26:19 +02:00
Pete Sage
268f0d9e03 Add Tests for Sonos Alarms (#150014) 2025-08-09 13:47:16 +02:00
Thomas D
f8d3bc1b89 Volvo: Skip unsupported API fields (#150285) 2025-08-09 12:24:53 +02:00
Philipp Waller
fb64ff1d17 Update knx-frontend to 2025.8.9.63154 (#150323) 2025-08-09 12:14:31 +02:00
steinmn
ff72faf83a Set suggested display precision on Volvo energy/fuel consumption sensors (#150296) 2025-08-09 07:48:49 +02:00
Tom
acb58c41eb Bump airOS to 0.2.7 supporting firmware 8.7.11 (#150298) 2025-08-09 07:48:05 +02:00
epenet
586b197fc3 Speedup Tuya snapshot tests (#150198) 2025-08-09 07:46:48 +02:00
Manu
5c1d16d582 Abort config flow if user has no friends in PlayStation Network (#150301) 2025-08-09 07:44:35 +02:00
Tom
73be4625ae Add sensor uom suggestions to airOS (#150303) 2025-08-09 07:43:51 +02:00
Andrew Jackson
775701133d Remove deprecated notify platform from Mastodon (#149735) 2025-08-09 01:17:48 +02:00
MB901
1af0282091 Add hardware version to FreeboxRouter device info (#150004) 2025-08-09 00:54:53 +02:00
Joakim Plate
c876bed33f Add ToGrill integration (#150075)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-09 00:24:54 +02:00
G Johansson
e9d39a826e Remove deprecated horizontal vane select from Sensibo (#150108) 2025-08-09 00:24:38 +02:00
Thomas55555
f9e1c07c04 Add event platform to Husqvarna Automower (#148212)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-08-09 00:07:47 +02:00
Renat Sibgatulin
c0bef51563 Refactor airq tests to mock the API class in a fixture (#149712) 2025-08-09 00:01:39 +02:00
Willem-Jan van Rootselaar
b41a9575af Add protected call for data retrieval (#150035) 2025-08-08 23:58:19 +02:00
Norbert Rittel
e585b3abd1 Fix missing sentence-casing of "AC failure" in bosch_alarm (#150279) 2025-08-08 23:33:55 +02:00
mbo18
5d2877f454 Add absolute humidity sensor to Awair integration (#150110) 2025-08-08 22:55:24 +02:00
Artur Pragacz
2d89c60ac5 Improve service schemas in unifiprotect (#150236) 2025-08-08 22:51:24 +02:00
Marco Gasparini
860a7b7d91 Fix Progettihwsw config flow (#150149) 2025-08-08 22:29:50 +02:00
Ludovic BOUÉ
5585376b40 Switchbot Hub Light level (#150147) 2025-08-08 22:13:23 +02:00
Thomas55555
c4cb70fc06 Handle HusqvarnaWSClientError (#150145) 2025-08-08 22:12:18 +02:00
Pete Sage
981ae39182 Fix dialog enhancement switch for Sonos Arc Ultra (#150116) 2025-08-08 22:11:32 +02:00
Alexandre CUER
dff4f79925 Remove useless strings from emoncms (#150182) 2025-08-08 22:00:48 +02:00
Manu
bf64e11960 Migrate unique_id only if monitor_id is present in Uptime Kuma (#150197) 2025-08-08 21:38:27 +02:00
Thomas D
823d20c67f Volvo: fix distance to empty battery (#150278) 2025-08-08 21:28:29 +02:00
Norbert Rittel
1a654cd35d Use common strings "Low"/"High" for more states in tuya (#150283) 2025-08-08 20:52:03 +02:00
Denis Shulyaka
13e592edaf Bump anthropic to 0.62.0 (#150284) 2025-08-08 20:51:49 +02:00
Norbert Rittel
94191239c6 Remove misleading "the" from Launch Library configuration (#150288) 2025-08-08 20:50:14 +02:00
Denis Shulyaka
91a1ca09f7 Add GPT-5 support (#150281) 2025-08-08 20:49:09 +02:00
Tom
9f1fe8a067 Add binary_sensor to UISP airOS (#149803)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-08 20:34:40 +02:00
Yuxin Wang
f2c9cdb09e Add quality scale for APCUPSD integration (#146999) 2025-08-08 20:31:34 +02:00
Tom
712115cdb8 Bump airOS to 0.2.6 improving device class matching more devices (#150134) 2025-08-08 19:33:16 +02:00
dependabot[bot]
eb6ae9d2d6 Bump actions/cache from 4.2.3 to 4.2.4 (#150253)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-08 15:28:51 +02:00
puddly
b126f3fa66 Bump ZHA to 0.0.68 (#150208) 2025-08-08 15:27:17 +02:00
puddly
2d720f0d32 Fix JSON serialization for ZHA diagnostics download (#150210) 2025-08-08 15:27:00 +02:00
Raphael Hehl
c0155f5e80 Handle Unifi Protect BadRequest exception during API key creation (#150223) 2025-08-08 15:26:02 +02:00
Thomas D
23a2d69984 Volvo: fix missing charging power options (#150272) 2025-08-08 15:25:19 +02:00
peteS-UK
a8779d5f52 Fix error on startup when no Apps or Radio plugins are installed for Squeezebox (#150267) 2025-08-08 15:24:41 +02:00
Robert Resch
01c197e830 Constraint num2words to 0.5.14 (#150276) 2025-08-08 15:06:31 +02:00
peteS-UK
ef4f476844 Fix handing for zero volume error in Squeezebox (#150265) 2025-08-08 14:26:04 +02:00
dependabot[bot]
8aee05b8b0 Bump github/codeql-action from 3.29.5 to 3.29.7 (#150254)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-08 13:57:20 +02:00
Denis Shulyaka
0f3f8d5707 Bump openai to 1.99.3 (#150232) 2025-08-08 13:57:12 +02:00
epenet
2948b1c58e Cleanup Tuya fixture files (#150190) 2025-08-08 13:56:44 +02:00
Joris Pelgröm
4cb2af4d08 Add select platform to LetPot integration (#150212) 2025-08-08 13:47:13 +02:00
G Johansson
8e12d2028d Remove previously deprecated linear_garage_door (#150109)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-08 13:09:01 +02:00
G Johansson
5b046def8e Move holiday object to runtime data in workday (#149122) 2025-08-08 12:02:07 +02:00
Åke Strandberg
6a81bf6f5e Improve interface between Miele integration and pymiele library (#150214) 2025-08-08 11:40:04 +02:00
epenet
102d6a37c0 Use generated device id in tuya tests (#150196) 2025-08-08 09:15:42 +02:00
Yuxin Wang
fd6aba3022 Add missing strings for APCUPSD (#150242) 2025-08-08 08:41:03 +02:00
tronikos
a88eadf863 Update Opower strings (#150247) 2025-08-08 08:40:28 +02:00
Tom
52f0d04c38 Improve Roborock test teardown (#150144) 2025-08-07 20:32:05 -07:00
Denis Shulyaka
3ab80c6ff2 Bump google-genai to 1.29.0 (#150225) 2025-08-07 16:26:02 -07:00
Vincent Wolsink
71485871c8 Bump Huum requirement to 0.8.1 (#150220) 2025-08-07 21:59:58 +01:00
Martin Hjelmare
ba0da4c2a3 Remove switchbot vacuum battery attribute (#150227) 2025-08-07 22:39:45 +02:00
Martin Hjelmare
cbaadebac3 Fix Tibber coordinator ContextVar warning (#150229) 2025-08-07 22:39:24 +02:00
Åke Strandberg
fd0ae32058 Bump pymiele to 0.5.3 (#150216) 2025-08-07 20:48:25 +02:00
Martin Hjelmare
382bf78ee0 Ignore MQTT vacuum battery warning (#150211) 2025-08-07 20:11:39 +02:00
Martin Hjelmare
6aa077a48d Silence vacuum battery deprecation for built in integrations (#150204) 2025-08-07 19:43:36 +02:00
Joakim Sørensen
b638fcbaad Bump hass-nabucasa from 0.111.1 to 0.111.2 (#150209) 2025-08-07 19:42:22 +02:00
G Johansson
704edac9fd Remove deprecated state from backup schedule (#150114)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-07 17:42:53 +01:00
yufeng
ff9e2a8f1e Update tuya translation for reverse energy sensor (#149317) 2025-08-07 17:08:57 +02:00
Stefan H.
d778afe61a Fix Enigma2 startup hang (#149756) 2025-08-07 15:33:24 +01:00
Norbert Rittel
448084e2b5 Fix description of button.press action (#150181) 2025-08-07 15:22:36 +02:00
jan iversen
d99379ffdf modbus: use only 1 logger instance. (#150130) 2025-08-07 15:11:00 +02:00
Maciej Bieniek
b835b7f266 Bump imgw_pib to version 1.5.3 (#150178) 2025-08-07 13:31:55 +02:00
epenet
e96e97edca Add Tuya snapshots tests for sj category (rain sensor) (#150173) 2025-08-07 13:24:33 +02:00
epenet
df7c657d7e Add Tuya snapshots tests for wk category (thermostat) (#150175) 2025-08-07 12:53:19 +02:00
epenet
4f5502ab47 Add Tuya snapshots tests for ldcg category (luminance sensor) (#150169) 2025-08-07 12:50:46 +02:00
epenet
c30ee776e9 Add Tuya snapshots tests for zwjcy category (soil sensor) (#150168) 2025-08-07 10:44:51 +02:00
epenet
efebdc0181 Add Tuya snapshots tests for cl category (curtains) (#150167) 2025-08-07 10:42:36 +02:00
jan iversen
da7fc88f1f Bump pymodbus to v3.11.0. (#150129) 2025-08-07 08:13:11 +02:00
Joris Pelgröm
566aeb5e9a Bump letpot to 0.6.1 (#150137) 2025-08-07 08:08:47 +02:00
J. Nick Koston
d17f0ef55a Bump inkbird-ble to 1.1.0 to add support for IAM-T2 (#150158) 2025-08-07 08:07:31 +02:00
Abílio Costa
35025c4b59 Fix roborock config flow tests (#150135) 2025-08-07 00:05:31 +01:00
Abílio Costa
e5d512d5e5 Add entity filter to target state change tracker (#150064)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-06 20:03:09 +02:00
puddly
2b5028bfb7 Bump ZHA to 0.0.67 (#150132) 2025-08-06 19:56:44 +02:00
Paul Bottein
757fee9f73 Use state selector for climate set hvac mode service (#148963) 2025-08-06 17:48:55 +01:00
Artur Pragacz
06130219b4 Use relative condition keys (#150021) 2025-08-06 17:20:30 +01:00
AlCalzone
4e2fe63182 Check for Z-Wave firmware updates of sleeping devices (#150123) 2025-08-06 18:08:51 +02:00
Luca Angemi
d0cc9990dd Deprecate Roborock battery feature (#150126) 2025-08-06 17:32:23 +02:00
epenet
76ca9ce3a4 Add comment to Tuya code for unsupported devices (#150125)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-06 17:12:32 +02:00
epenet
124e7cf4c8 Add support for tuya ywcgq category (liquid level) (#150096)
Thanks @joostlek / @frenck
2025-08-06 15:38:50 +02:00
G Johansson
260ea9a3be Remove previously deprecated raw value attribute from onewire (#150112) 2025-08-06 15:24:22 +02:00
Bram Kragten
e1f6820cb6 Update frontend to 20250806.0 (#150106)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-06 15:22:46 +02:00
David Poll
2215777cfb Fix zero-argument functions with as_function (#150062) 2025-08-06 15:20:03 +02:00
G Johansson
fa3ce62ae8 Bump holidays to 0.78 (#150103) 2025-08-06 14:55:00 +02:00
Joakim Sørensen
33421bddf3 Remove myself as codeowner from traccar_server (#150107) 2025-08-06 14:51:43 +02:00
markhannon
1efe2b437d Improve dependency transparency for Zimi integration (#145879) 2025-08-06 14:50:06 +02:00
Joost Lekkerkerker
a54f0adf74 Enable disabled Ollama config entries after entry migration (#150105) 2025-08-06 14:27:36 +02:00
epenet
afe574f74e Simplify DPCode lookup in Tuya (#150052) 2025-08-06 14:24:01 +02:00
epenet
25aae8944d Add Tuya snapshots tests for mzj category (sous-vide) (#150102) 2025-08-06 14:17:30 +02:00
Martin Hjelmare
f26e6ad211 Fix update coordinator ContextVar log for custom integrations (#150100) 2025-08-06 14:14:42 +02:00
Joost Lekkerkerker
e9444a2e4d Enable disabled Anthropic config entries after entry migration (#150098) 2025-08-06 13:24:49 +02:00
Joost Lekkerkerker
60988534a9 Enable disabled OpenAI config entries after entry migration (#150099) 2025-08-06 13:24:37 +02:00
Michael
932bf81ac8 Add common constant ATTR_CONFIG_ENTRY_ID (#150067) 2025-08-06 12:42:51 +02:00
Jan Bouwhuis
1302b6744e Deprecate MQTT vacuum battery feature and remove it as default feature (#149877)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-06 11:51:31 +02:00
tronikos
0aeff366bd Fix PG&E and Duquesne Light Company in Opower (#149658)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-08-06 11:32:42 +02:00
epenet
0db23b0da6 Add Tuya debug logging for new devices (#150091)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-06 11:23:34 +02:00
epenet
863e2074b6 Add more switches to Tuya tdq category (#150090) 2025-08-06 11:03:26 +02:00
epenet
13828f6713 Remove tuya vacuum battery level attribute (#150086) 2025-08-06 11:02:04 +02:00
starkillerOG
fdb38ec8ec Reduce Reolink fimware polling from 12h to 24h (#150095) 2025-08-06 10:58:52 +02:00
Robert Resch
55abb6e594 Fix hassio tests by only mocking supervisor id (#150093) 2025-08-06 10:53:55 +02:00
Stefan Agner
a83e4f5c63 Add missing translations for unhealthy Supervisor issues (#150036) 2025-08-06 10:07:36 +02:00
J. Nick Koston
cba15ee439 Bump habluetooth to 4.0.2 (#150078)
Co-authored-by: Robert Resch <robert@resch.dev>
2025-08-06 09:51:44 +02:00
dependabot[bot]
400620399a Bump actions/download-artifact from 4.3.0 to 5.0.0 (#150084)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-06 09:48:10 +02:00
dependabot[bot]
28e19215ad Bump actions/ai-inference from 1.2.7 to 1.2.8 (#150083)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-06 09:46:02 +02:00
Philipp Waller
119d0a0170 Update knx-frontend to 2025.8.6.52906 (#150085) 2025-08-06 09:28:44 +02:00
Joakim Sørensen
69faf38e86 Bump hass-nabucasa from 0.111.0 to 0.111.1 (#150082) 2025-08-06 09:24:09 +02:00
puddly
d0ef1a1a8b Bump ZHA to 0.0.66 (#150081) 2025-08-06 09:22:07 +02:00
Retha Runolfsson
8f328810bf Bump pyswitchbot to 0.68.3 (#150080) 2025-08-05 19:20:37 -10:00
Pete Sage
4f1b75e3b4 Bump soco to 0.30.11 (#150072) 2025-08-05 22:56:27 +01:00
J. Nick Koston
445a7fc749 Bump yalexs to 8.11.1 (#150073) 2025-08-05 22:55:01 +01:00
Robert Svensson
977c0797aa Bump axis to v65 (#150065) 2025-08-05 11:36:48 -10:00
Ludovic BOUÉ
a24f027923 Add icon for esa_state in Matter integration (#149075) 2025-08-05 23:18:48 +02:00
Martin Hjelmare
7b45798e30 Remove matter vacuum battery level attribute (#150061) 2025-08-05 22:40:42 +02:00
Artur Pragacz
2b0cda0ad1 Adjust condition and trigger method names (#150060) 2025-08-05 19:46:03 +01:00
starkillerOG
12dca4b1bf Bump reolink-aio to 0.14.6 (#150055) 2025-08-05 18:58:22 +02:00
karwosts
8c509b11b2 Fix template sensor uom string (#150057) 2025-08-05 18:56:34 +02:00
Joost Lekkerkerker
991c9008bd Change AI task strings (#150051) 2025-08-05 16:35:41 +02:00
Martin Hjelmare
fe95f6e1c5 Improve downloader service (#150046)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-05 16:12:55 +02:00
Bram Kragten
37510aa316 Update frontend to 20250805.0 (#150049) 2025-08-05 16:01:47 +02:00
Marc Mueller
4e40e9bf74 Update mypy-dev to 1.18.0a4 (#150005) 2025-08-05 15:56:03 +02:00
Bouwe Westerdijk
70c9b1f095 Implement snapshot testing for Plugwise button platform (#149984) 2025-08-05 15:31:02 +02:00
dependabot[bot]
f714388130 Bump docker/login-action from 3.4.0 to 3.5.0 (#150034)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-05 15:25:58 +02:00
Joost Lekkerkerker
ffb2a693f4 Ignore vacuum entities that properly deprecate battery (#150043) 2025-08-05 15:22:21 +02:00
Andrew Jackson
9d8e253ad3 Default to zero quantity on new todo items in Mealie (#150047) 2025-08-05 15:15:08 +02:00
dependabot[bot]
31631cc882 Bump actions/ai-inference from 1.2.4 to 1.2.7 (#150038)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-05 14:40:01 +02:00
epenet
3a64357201 Fix Tuya fan speeds with numeric values (#149971) 2025-08-05 13:22:45 +02:00
Thomas55555
20fdec9e9c Reduce polling in Husqvarna Automower (#149255)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-05 12:56:27 +02:00
Nippey
064a63fe1f Add support for Tuya "Bresser 7-in-1 Weatherstation" (#149498) 2025-08-05 12:54:40 +02:00
epenet
803654223a Revert "Do not create Tuya fan entities without control" (#150032) 2025-08-05 12:23:06 +02:00
epenet
a6148b50cf Add Tuya snapshots tests for button and vacuum platform (#149968) 2025-08-05 11:21:05 +02:00
Ludovic BOUÉ
02a3c5be14 Matter pump setpoint CurrentLevel limit (#149689) 2025-08-05 11:19:03 +02:00
Paulus Schoutsen
08ea640629 Do not allow overriding users when uuid is duplicate (#149408) 2025-08-05 11:13:32 +02:00
Grzegorz M
7dd761c9c3 Bump icalendar from 6.1.0 to 6.3.1 for CalDav (#149990) 2025-08-05 11:09:03 +02:00
epenet
6b827dfc33 Do not create Tuya fan entities without control (#149976)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-05 09:52:29 +02:00
Robert Resch
67c19087dd Bump deebot-client to 13.6.0 (#149983) 2025-08-05 09:08:33 +02:00
epenet
55c7c2f730 Redact terminal_id in Tuya fixture files (#149957) 2025-08-05 09:06:15 +02:00
Matthias Alphart
afee936c3d Update knx-frontend to 2025.8.4.154919 (#149991) 2025-08-05 09:03:23 +02:00
Marc Mueller
ed2ced6c36 Fix zimi test RuntimeWarnings (#150017) 2025-08-05 08:55:54 +02:00
Martin Hjelmare
4c5cf028d7 Fix Z-Wave duplicate provisioned device (#150008) 2025-08-05 08:50:42 +02:00
Thomas55555
68faa897ad Bump aioautomower to 2.1.2 (#150003) 2025-08-05 08:48:47 +02:00
Artur Pragacz
53c9c42148 Use relative trigger keys (#149846) 2025-08-04 23:01:40 +01:00
Michael Hansen
d48cc03be7 Bump wyoming to 1.7.2 (#150007) 2025-08-04 23:36:24 +02:00
starkillerOG
28236aa023 Reolink disable entities by default (#149986) 2025-08-04 23:03:38 +02:00
Tom
bfae07135a Bump python-airos to 0.2.4 (#149885) 2025-08-04 22:35:47 +02:00
Thomas55555
99d580e371 Add reset cutting blade usage time to Husqvarna Automower (#149628) 2025-08-04 22:28:34 +02:00
Petro31
4d53450cbf Create battery_level deprecation repair for template vacuum platform (#149987)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-08-04 21:54:50 +02:00
epenet
1fbce01e26 Add initial support for Tuya wg2 category (#149676) 2025-08-04 21:30:43 +02:00
markhannon
a9621ac811 Add tests for Zimi entitites (#144292) 2025-08-04 20:41:05 +02:00
Marc Mueller
94f2118b19 Fix flaky history_stats test case (#149974) 2025-08-04 20:34:07 +02:00
Mike Degatano
73ca6b4900 Add translation strings for unsupported OS version (#149837) 2025-08-04 17:40:11 +02:00
Joakim Sørensen
31e647b5b0 Bump hass-nabucasa from 0.110.1 to 0.111.0 (#149977) 2025-08-04 16:59:07 +02:00
epenet
fac5b2c09c Add Tuya snapshots tests for camera platform (#149959) 2025-08-04 16:58:46 +02:00
Martin Hjelmare
ae48179e95 Bump zwave-js-server-python to 0.67.1 (#149972) 2025-08-04 15:58:57 +02:00
Willem-Jan van Rootselaar
88c9d5dbe3 Fix bsblan reauthentication (#149926) 2025-08-04 15:35:41 +02:00
hanwg
b76f47cd9f Add bot details to Telegram bot events (#148638) 2025-08-04 14:32:48 +02:00
hanwg
822e1ffc8d Minor UI improvements for Telegram bot actions (#149889) 2025-08-04 14:27:15 +02:00
Martin Hjelmare
1632e0aef6 Direct migrations with Z-Wave JS UI to docs (#149966) 2025-08-04 13:36:12 +02:00
Petro31
e2bc73f153 Fix optimistic covers (#149962) 2025-08-04 13:35:13 +02:00
Joakim Sørensen
46cfdddc80 Move to the new handler for migrate_paypal_agreement (#149934) 2025-08-04 13:29:11 +02:00
Joost Lekkerkerker
0bdf6757c4 Pass config entry to Remote Calendar coordinator (#149958) 2025-08-04 13:28:59 +02:00
Joost Lekkerkerker
312e590360 Pass config entry to Broadlink coordinator (#149949) 2025-08-04 13:27:51 +02:00
Joost Lekkerkerker
7a6aaf667b Pass config entry to hue coordinator (#149941) 2025-08-04 13:27:10 +02:00
Joost Lekkerkerker
33eaca24d6 Pass config entry to Simplisafe coordinator (#149943) 2025-08-04 13:21:29 +02:00
Joost Lekkerkerker
3d27d501b1 Pass config entry to Mill coordinator (#149942) 2025-08-04 13:20:30 +02:00
Joost Lekkerkerker
39b651e075 Pass config entry to Kraken coordinator (#149944) 2025-08-04 13:17:27 +02:00
Joost Lekkerkerker
a962777a2e Pass config entry to Meteo France coordinator (#149945) 2025-08-04 13:14:50 +02:00
Joost Lekkerkerker
594ce8f266 Pass config entry to Smarttub coordinator (#149946) 2025-08-04 12:58:46 +02:00
Joost Lekkerkerker
9f867f268c Pass config entry to Snoo coordinator (#149947) 2025-08-04 12:58:19 +02:00
Joost Lekkerkerker
9edd242734 Pass config entry to SMS coordinator (#149955) 2025-08-04 12:49:26 +02:00
Bouwe Westerdijk
93e11aa8bc Refresh plugwise test-fixtures (#149875) 2025-08-04 12:35:24 +02:00
Joakim Sørensen
c2b298283e Bump hass-nabucasa from 0.110.0 to 0.110.1 (#149956) 2025-08-04 12:32:01 +02:00
Joost Lekkerkerker
106c086e8b Pass config entry to Unifi coordinator (#149952) 2025-08-04 12:29:27 +02:00
Markus Adrario
cbf4130bff Add zeroconf flow to Homee (#149820)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-04 12:26:22 +02:00
Erik Montnemery
afffe0b08b Fix DeviceEntry.suggested_area deprecation warning (#149951) 2025-08-04 12:20:30 +02:00
Joost Lekkerkerker
c1ccfee7cc Pass config entry to AsusWRT coordinator (#149953) 2025-08-04 12:08:03 +02:00
epenet
8d8383e1c1 Add extra Tuya snapshots for dc and dj category (lights) (#149940) 2025-08-04 12:07:25 +02:00
Marc Mueller
f350a1a1fa Add hassfest check to help with future dependency updates (#149624) 2025-08-04 12:03:39 +02:00
epenet
fe2bd8d09e Add Tuya snapshots for ywcgq category (#149948) 2025-08-04 12:02:34 +02:00
Joost Lekkerkerker
cf14226b02 Pass config entry to Fronius coordinator (#149954) 2025-08-04 12:02:21 +02:00
Brett Adams
bd3fe1d4ad Fix credit sensor when there are no vehicles in Teslemetry (#149925) 2025-08-04 11:26:14 +02:00
Christopher Fenner
377ca04be8 Update sensor icons in Volvo integration (#149811) 2025-08-04 11:24:51 +02:00
epenet
5837f55205 Add extra Tuya snapshots for cz category (#149938) 2025-08-04 11:23:58 +02:00
andreimoraru
0766edb9c4 Bump yt-dlp to 2025.07.21 (#149916)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-04 11:15:38 +02:00
epenet
e62e3778f3 Add Tuya snapshots for hps category (#149936) 2025-08-04 11:14:11 +02:00
epenet
aa8e4c1c15 Add Tuya snapshots for sgbj, sp, wfcon and ywbj category (#149933) 2025-08-04 11:11:06 +02:00
Erik Montnemery
46ed8a73fc Bump automower-ble to 0.2.7 (#149928) 2025-08-04 11:09:18 +02:00
dependabot[bot]
83f22497ae Bump actions/ai-inference from 1.2.3 to 1.2.4 (#149929)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-04 11:09:02 +02:00
epenet
3dda1685dc Add Tuya snapshots for pc and pir category (#149931) 2025-08-04 11:08:43 +02:00
Ståle Storø Hauknes
6fa9d42401 Airthings ContextVar warning (#149930) 2025-08-04 11:05:32 +02:00
jvmahon
1a54d566f8 Apple vendor name update (#149845) 2025-08-04 10:26:11 +02:00
puddly
1a9cae0f89 Bump ZHA to 0.0.65 (#149922) 2025-08-04 10:17:25 +02:00
epenet
551dcaa169 Rename Tuya fixture files (#149927) 2025-08-04 10:08:03 +02:00
epenet
5467db065b Make Tuya complex type handling explicit (#149677) 2025-08-04 07:59:47 +02:00
J. Nick Koston
6a8d752e56 Bump aiodiscover to 2.7.1 (#149920) 2025-08-03 16:42:38 -10:00
J. Nick Koston
179a56628d Bump dbus-fast to 2.44.3 (#149921) 2025-08-03 16:42:11 -10:00
J. Nick Koston
b3f830773a Bump yalexs-ble to 3.1.2 (#149917) 2025-08-03 15:02:30 -10:00
Joost Lekkerkerker
084e06ec7d Bump python-open-router to 0.3.1 (#149873) 2025-08-03 21:46:40 +02:00
Maciej Bieniek
e0190afd3c Bump imgw_pib to version 1.5.2 (#149892) 2025-08-03 20:07:01 +02:00
Jan-Philipp Benecke
b9e16d54c4 Add jitter sensor to Ping integration (#149899) 2025-08-03 20:06:14 +02:00
Thomas55555
627785edc1 Fix options for error sensor in Husqvarna Automower (#149901) 2025-08-03 20:05:23 +02:00
Andrew Jackson
4318e29ce8 Bump aiomealie to 0.10.1 (#149890) 2025-08-03 14:18:13 +02:00
Martin Hjelmare
fea5c63bba Fix Z-Wave handling of driver ready event (#149879) 2025-08-03 11:23:01 +02:00
Åke Strandberg
b2349ac2bd Improve miele climate test coverage (#149859) 2025-08-03 11:19:08 +02:00
Marc Mueller
08f7b708a4 Update pytest warnings filter (#149839) 2025-08-03 09:25:17 +02:00
Martin Hjelmare
1236801b7d Fix Z-Wave config entry state conditions in listen task (#149841) 2025-08-02 23:07:16 +02:00
Thomas D
72d9dbf39d Add scopes in config flow auth request for Volvo integration (#149813) 2025-08-02 22:17:13 +02:00
Thomas D
755864f9f3 Add sensor platform to Qbus integration (#149389)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-02 20:01:58 +02:00
peteS-UK
fa476d4e34 Fix initialisation of Apps and Radios list for Squeezebox (#149834) 2025-08-02 20:01:02 +02:00
Manu
018197e41a Add notifiers to send direct messages to friends in PlayStation Network (#149844) 2025-08-02 19:55:45 +02:00
Brett Adams
7dd2b9e422 Make history coordinator more reliable in Tesla Fleet (#149854) 2025-08-02 19:54:19 +02:00
hahn-th
3e615fd373 Improve code quality for garage door modules in homematicip_cloud (#149856) 2025-08-02 19:51:08 +02:00
Oliver
c0bf167e10 Update denonavr to 1.1.2 (#149842) 2025-08-02 19:44:01 +02:00
Andrea Turri
45f6778ff4 Fix Miele hob translation keys (#149865) 2025-08-02 18:37:57 +02:00
Jamin
bddd4d621a Bump VoIP utils to 0.3.4 (#149786) 2025-08-01 20:37:45 +01:00
Norbert Rittel
b0e75e9ee4 Update reference for volatile_organic_compounds_parts in template (#149831) 2025-08-01 20:36:10 +01:00
Norbert Rittel
d45c03a795 Update reference for volatile_organic_compounds_parts in random (#149832) 2025-08-01 20:35:04 +01:00
Norbert Rittel
8562c8d32f Add translations for recently introduced device classes to scrape (#149822) 2025-08-01 20:34:31 +01:00
Norbert Rittel
ae42d71123 Add translations for recently introduced device classes to sql (#149821) 2025-08-01 20:33:47 +01:00
Alexandre CUER
9616c8cd7b Bump pyemoncms to 0.1.2 (#149825) 2025-08-01 20:04:16 +01:00
kizovinh
9394546668 Add EZVIZ battery camera power status and online status sensor (#146822) 2025-08-01 20:00:53 +01:00
Norbert Rittel
d43f21c2e2 Fix descriptions for template number fields (#149804) 2025-08-01 20:35:48 +02:00
Norbert Rittel
8d68fee9f8 Add translation for absolute_humidity device class to template (#149814) 2025-08-01 18:30:59 +01:00
Willem-Jan van Rootselaar
b4a4e218ec Add re-authentication to BSBLan (#146280)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-08-01 16:42:59 +02:00
Norbert Rittel
fb2d62d692 Add translation for absolute_humidity device class to mqtt (#149818) 2025-08-01 15:57:47 +02:00
Erik Montnemery
f538807d6e Make device suggested_area only influence new devices (#149758)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2025-08-01 14:54:58 +02:00
Joost Lekkerkerker
a08c3c9f44 Improve Tado binary sensor tests (#149807) 2025-08-01 14:38:12 +02:00
Joost Lekkerkerker
506431c75f Improve Tado water heater tests (#149806) 2025-08-01 14:38:02 +02:00
Joost Lekkerkerker
37579440e6 Improve Tado climate tests (#149808) 2025-08-01 14:37:12 +02:00
Joost Lekkerkerker
5ce2729dc2 Improve Tado sensor tests (#149809) 2025-08-01 14:36:57 +02:00
Joost Lekkerkerker
b5e4ae4a53 Improve Tado switch tests (#149810)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-01 14:36:37 +02:00
Norbert Rittel
3d4386ea6d Add translation for absolute_humidity device class to random (#149815) 2025-08-01 14:32:14 +02:00
Alexandre CUER
9f1cec893e emoncms - fix missing data descriptions (#149733) 2025-08-01 13:22:46 +02:00
starkillerOG
bc87140a6f Update after Motion Blinds tilt change (#149779) 2025-08-01 11:15:49 +02:00
Erik Montnemery
d77a3fca83 Exclude is_new from DeviceEntry snapshots (#149801) 2025-08-01 11:01:26 +02:00
Joakim Sørensen
924a86dfb6 Add nameservers to supervisor system health response (#149749) 2025-08-01 10:51:48 +02:00
Erik Montnemery
0d7608f7c5 Deprecate DeviceEntry.suggested_area (#149730) 2025-08-01 10:34:34 +02:00
Tom
22e054f4cd Add diagnostics to UISP AirOS (#149631) 2025-08-01 09:24:22 +02:00
epenet
8b53b26333 Fix tuya light supported color modes (#149793)
Co-authored-by: Erik <erik@montnemery.com>
2025-08-01 09:13:53 +02:00
Erik Montnemery
4d59e8cd80 Fix flaky velbus test (#149743) 2025-08-01 07:49:51 +02:00
Fabian Leutgeb
61396d92a5 Homekit valve duration characteristics (#149698)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-07-31 15:21:48 -10:00
Philippe Lafoucrière
c72c600de4 Fix bootstrap script path resolution (#149721) 2025-07-31 23:47:25 +01:00
J. Nick Koston
b86b0c10bd Bump aioesphomeapi to 37.2.2 (#149755) 2025-07-31 12:23:24 -10:00
starkillerOG
eb222f6c5d Bump motionblinds to 0.6.30 (#149764) 2025-08-01 01:09:20 +03:00
Manu
4b5fe424ed Hide configuration URL when Uptime Kuma is installed locally (#149781) 2025-08-01 01:07:56 +03:00
Nathan Spencer
61ca42e923 Bump pylitterbot to 2024.2.3 (#149763) 2025-07-31 21:04:23 +02:00
Copilot
21c1427abf Fix ZHA ContextVar deprecation by passing config_entry (#149748)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: joostlek <7083755+joostlek@users.noreply.github.com>
Co-authored-by: puddly <32534428+puddly@users.noreply.github.com>
Co-authored-by: TheJulianJES <6409465+TheJulianJES@users.noreply.github.com>
2025-07-31 14:52:17 -04:00
karwosts
aa6b37bc7c Fix add_suggested_values_to_schema when the schema has sections (#149718)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-07-31 20:50:26 +02:00
Marc Mueller
bbc1466cfc Update rpds-py to 0.26.0 (#149753) 2025-07-31 17:51:10 +01:00
Bram Kragten
21a9799060 Update frontend to 20250731.0 (#149757) 2025-07-31 18:46:10 +02:00
Erik Montnemery
f7d54b46ec Improve test of FlowHandler.add_suggested_values_to_schema (#149759) 2025-07-31 17:55:15 +02:00
Erik Montnemery
6ad1b8dcb1 Fix kitchen_sink option flow (#149760) 2025-07-31 17:49:09 +02:00
Abílio Costa
5f6b1212a3 Remove data flow step_id deprecation note (#149714) 2025-07-31 16:04:09 +02:00
dependabot[bot]
58dc6a952e Bump home-assistant/wheels from 2025.03.0 to 2025.07.0 (#149741) 2025-07-31 15:35:55 +02:00
Petro31
59d8df142d Nitpick default translations for template integration (#149740) 2025-07-31 15:19:43 +02:00
Petro31
04fb86b4ba Fix unique_id in config validation for legacy weather platform (#149742) 2025-07-31 15:19:37 +02:00
Erik Montnemery
3d744f032f Make _EventDeviceRegistryUpdatedData_Remove JSON serializable (#149734) 2025-07-31 12:35:13 +02:00
J. Nick Koston
f7c8cdb3a7 Bump aioesphomeapi to 37.2.0 (#149732) 2025-07-31 12:10:23 +02:00
Copilot
3952544822 Fix ContextVar deprecation warning in homeassistant_hardware integration (#149687)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: joostlek <7083755+joostlek@users.noreply.github.com>
Co-authored-by: mib1185 <35783820+mib1185@users.noreply.github.com>
2025-07-31 12:06:04 +02:00
Erik Montnemery
42101dd432 Remove result from FlowResult (#149202) 2025-07-31 10:58:36 +02:00
L.
f7eacaa48d Bump xiaomi-ble to 1.2.0 (#149711) 2025-07-31 09:01:06 +02:00
johanzander
ad0db5c83a Update growattServer to version 1.7.1 (#149716) 2025-07-31 08:17:33 +02:00
J. Nick Koston
63216b77c2 Bump aioesphomeapi to 37.1.6 (#149715) 2025-07-30 13:54:18 -10:00
Åke Strandberg
7a55373b0b Fix bug when interpreting miele action response (#149710) 2025-07-31 01:07:12 +02:00
J. Nick Koston
f9e7459901 Fix ESPHome unnecessary probing on DHCP discovery (#149713) 2025-07-31 01:06:08 +02:00
starkillerOG
94dc2e2ea3 Bump reolink-aio to 0.14.5 (#149700) 2025-07-30 22:54:32 +01:00
Åke Strandberg
2cf144fb25 Add missing translations for miele dishwasher (#149702) 2025-07-30 22:45:05 +01:00
Jan Bouwhuis
f318766021 Fix inconsistent use of the term 'target' and a typo in MQTT translation strings (#149703) 2025-07-30 22:42:53 +01:00
Andrea Turri
ec7fb140ac Fix Miele induction hob empty state (#149706) 2025-07-30 22:38:11 +01:00
Petro31
2706c7d67d Add translations for all fields in template integration (#149692)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-07-30 22:30:05 +01:00
Roman Sivriver
b4e50902eb Fix typo in backup log message (#149705) 2025-07-30 22:29:26 +01:00
Åke Strandberg
1ead01bc9a Explicitly pass config_entry to miele coordinator (#149691) 2025-07-30 20:19:01 +02:00
puddly
389a1251a1 Bump ZHA to 0.0.64 (#149683)
Co-authored-by: TheJulianJES <TheJulianJES@users.noreply.github.com>
Co-authored-by: abmantis <amfcalt@gmail.com>
2025-07-30 18:59:41 +01:00
Manu
8d27ca1e21 Fix KeyError in friends coordinator (#149684) 2025-07-30 19:59:01 +02:00
Michael Hansen
a76af50c10 Bump intents to 2025.7.30 (#149678) 2025-07-30 19:57:59 +02:00
Renat Sibgatulin
09b91bd76a Clean airq tests (#149682) 2025-07-30 18:48:36 +01:00
Jan Bouwhuis
736d582d04 Fix translation string reference for MQTT climate subentry option (#149673) 2025-07-30 18:53:21 +02:00
Bram Kragten
8114df4219 Bump version to 2025.9.0 (#149680) 2025-07-30 18:36:20 +02:00
1381 changed files with 115578 additions and 15488 deletions

View File

@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -175,7 +175,7 @@ jobs:
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Download translations
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: translations
@@ -190,7 +190,7 @@ jobs:
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.4.0
uses: docker/login-action@v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -242,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set build additional args
run: |
@@ -256,7 +256,7 @@ jobs:
fi
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.4.0
uses: docker/login-action@v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -321,7 +321,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
@@ -330,14 +330,14 @@ jobs:
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
uses: docker/login-action@v3.4.0
uses: docker/login-action@v3.5.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@v3.4.0
uses: docker/login-action@v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -454,7 +454,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
@@ -462,7 +462,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download translations
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: translations
@@ -499,10 +499,10 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

View File

@@ -37,10 +37,10 @@ on:
type: boolean
env:
CACHE_VERSION: 4
CACHE_VERSION: 5
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.8"
HA_SHORT_VERSION: "2025.9"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -94,7 +94,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
@@ -246,7 +246,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -255,7 +255,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.4
with:
path: venv
key: >-
@@ -271,7 +271,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -292,7 +292,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -301,7 +301,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -310,7 +310,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -332,7 +332,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -341,7 +341,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -350,7 +350,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -372,7 +372,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -381,7 +381,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -390,7 +390,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -462,7 +462,7 @@ jobs:
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,7 +481,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -497,7 +497,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.4
with:
path: venv
key: >-
@@ -505,7 +505,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.4
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -584,7 +584,7 @@ jobs:
sudo apt-get -y install \
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -593,7 +593,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -617,7 +617,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -626,7 +626,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -651,7 +651,7 @@ jobs:
&& github.event_name == 'pull_request'
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Dependency review
uses: actions/dependency-review-action@v4.7.1
with:
@@ -674,7 +674,7 @@ jobs:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -683,7 +683,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -717,7 +717,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -726,7 +726,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -764,7 +764,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -773,7 +773,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -809,7 +809,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -825,7 +825,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -833,7 +833,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.4
with:
path: .mypy_cache
key: >-
@@ -886,7 +886,7 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -895,7 +895,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -947,7 +947,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -956,7 +956,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -970,7 +970,7 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
- name: Download pytest_buckets
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: pytest_buckets
- name: Compile English translations
@@ -1080,7 +1080,7 @@ jobs:
libmariadb-dev-compat \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1089,7 +1089,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1222,7 +1222,7 @@ jobs:
sudo apt-get -y install \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1231,7 +1231,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1334,9 +1334,9 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
@@ -1381,7 +1381,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1390,7 +1390,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.3
uses: actions/cache/restore@v4.2.4
with:
path: venv
fail-on-cache-miss: true
@@ -1484,9 +1484,9 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
pattern: coverage-*
- name: Upload coverage to Codecov
@@ -1511,7 +1511,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
pattern: test-results-*
- name: Upload test results to Codecov

View File

@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.29.5
uses: github/codeql-action/init@v3.29.9
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.29.5
uses: github/codeql-action/analyze@v3.29.9
with:
category: "/language:python"

View File

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@v1.2.3
uses: actions/ai-inference@v2.0.0
with:
model: openai/gpt-4o
system-prompt: |

View File

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@v1.2.3
uses: actions/ai-inference@v2.0.0
with:
model: openai/gpt-4o-mini
system-prompt: |

View File

@@ -9,7 +9,7 @@ jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.issue_type == 'Task'
if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0

View File

@@ -32,7 +32,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -135,20 +135,20 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Download env_file
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: requirements_diff
@@ -159,7 +159,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2025.03.0
uses: home-assistant/wheels@2025.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -184,25 +184,25 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.2
uses: actions/checkout@v5.0.0
- name: Download env_file
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: actions/download-artifact@v4.3.0
uses: actions/download-artifact@v5.0.0
with:
name: requirements_all_wheels
@@ -219,7 +219,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: home-assistant/wheels@2025.03.0
uses: home-assistant/wheels@2025.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

View File

@@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v6.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]

View File

@@ -310,7 +310,6 @@ homeassistant.components.letpot.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linear_garage_door.*
homeassistant.components.linkplay.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
@@ -467,6 +466,7 @@ homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*

16
CODEOWNERS generated
View File

@@ -156,8 +156,8 @@ build.json @home-assistant/supervisor
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
/homeassistant/components/asuswrt/ @kennedyshead @ollo69
/tests/components/asuswrt/ @kennedyshead @ollo69
/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/homeassistant/components/atag/ @MatsNL
/tests/components/atag/ @MatsNL
/homeassistant/components/aten_pe/ @mtdcr
@@ -438,8 +438,8 @@ build.json @home-assistant/supervisor
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
@@ -862,8 +862,6 @@ build.json @home-assistant/supervisor
/tests/components/lifx/ @Djelibeybi
/homeassistant/components/light/ @home-assistant/core
/tests/components/light/ @home-assistant/core
/homeassistant/components/linear_garage_door/ @IceBotYT
/tests/components/linear_garage_door/ @IceBotYT
/homeassistant/components/linkplay/ @Velleman
/tests/components/linkplay/ @Velleman
/homeassistant/components/linux_battery/ @fabaff
@@ -1417,6 +1415,8 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
/homeassistant/components/sleep_as_android/ @tr4nt0r
/tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
@@ -1599,6 +1599,8 @@ build.json @home-assistant/supervisor
/tests/components/todo/ @home-assistant/core
/homeassistant/components/todoist/ @boralyl
/tests/components/todoist/ @boralyl
/homeassistant/components/togrill/ @elupus
/tests/components/togrill/ @elupus
/homeassistant/components/tolo/ @MatthiasLohr
/tests/components/tolo/ @MatthiasLohr
/homeassistant/components/tomorrowio/ @raman325 @lymanepp
@@ -1613,8 +1615,6 @@ build.json @home-assistant/supervisor
/tests/components/tplink_omada/ @MarkGodwin
/homeassistant/components/traccar/ @ludeeus
/tests/components/traccar/ @ludeeus
/homeassistant/components/traccar_server/ @ludeeus
/tests/components/traccar_server/ @ludeeus
/homeassistant/components/trace/ @home-assistant/core
/tests/components/trace/ @home-assistant/core
/homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu

2
Dockerfile generated
View File

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.7.1
RUN pip3 install uv==0.8.9
WORKDIR /usr/src

View File

@@ -120,6 +120,9 @@ class AuthStore:
new_user = models.User(**kwargs)
while new_user.id in self._users:
new_user = models.User(**kwargs)
self._users[new_user.id] = new_user
if credentials is None:

View File

@@ -33,7 +33,10 @@ class AuthFlowContext(FlowContext, total=False):
redirect_uri: str
AuthFlowResult = FlowResult[AuthFlowContext, tuple[str, str]]
class AuthFlowResult(FlowResult[AuthFlowContext, tuple[str, str]], total=False):
"""Typed result dict for auth flow."""
result: Credentials # Only present if type is CREATE_ENTRY
@attr.s(slots=True)

View File

@@ -10,7 +10,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
_PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.SENSOR,
]
async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:

View File

@@ -0,0 +1,106 @@
"""AirOS Binary Sensor component for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describe an AirOS binary sensor."""
value_fn: Callable[[AirOSData], bool]
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
AirOSBinarySensorEntityDescription(
key="portfw",
translation_key="port_forwarding",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.portfw,
),
AirOSBinarySensorEntityDescription(
key="dhcp_client",
translation_key="dhcp_client",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcpc,
),
AirOSBinarySensorEntityDescription(
key="dhcp_server",
translation_key="dhcp_server",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcpd,
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="dhcp6_server",
translation_key="dhcp6_server",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.dhcp6d_stateful,
entity_registry_enabled_default=False,
),
AirOSBinarySensorEntityDescription(
key="pppoe",
translation_key="pppoe",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.services.pppoe,
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AirOSConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the AirOS binary sensors from a config entry."""
coordinator = config_entry.runtime_data
async_add_entities(
AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
)
class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
"""Representation of a binary sensor."""
entity_description: AirOSBinarySensorEntityDescription
def __init__(
self,
coordinator: AirOSDataUpdateCoordinator,
description: AirOSBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"
@property
def is_on(self) -> bool:
"""Return the state of the binary sensor."""
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -6,11 +6,11 @@ import logging
from typing import Any
from airos.exceptions import (
ConnectionAuthenticationError,
ConnectionSetupError,
DataMissingError,
DeviceConnectionError,
KeyDataMissingError,
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDataMissingError,
AirOSDeviceConnectionError,
AirOSKeyDataMissingError,
)
import voluptuous as vol
@@ -59,13 +59,13 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
airos_data = await airos_device.status()
except (
ConnectionSetupError,
DeviceConnectionError,
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (ConnectionAuthenticationError, DataMissingError):
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except KeyDataMissingError:
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")

View File

@@ -6,10 +6,10 @@ import logging
from airos.airos8 import AirOS, AirOSData
from airos.exceptions import (
ConnectionAuthenticationError,
ConnectionSetupError,
DataMissingError,
DeviceConnectionError,
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDataMissingError,
AirOSDeviceConnectionError,
)
from homeassistant.config_entries import ConfigEntry
@@ -47,18 +47,22 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except (ConnectionAuthenticationError,) as err:
except (AirOSConnectionAuthenticationError,) as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (ConnectionSetupError, DeviceConnectionError, TimeoutError) as err:
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
TimeoutError,
) as err:
_LOGGER.error("Error connecting to airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
) from err
except (DataMissingError,) as err:
except (AirOSDataMissingError,) as err:
_LOGGER.error("Expected data not returned by airOS device: %s", err)
raise UpdateFailed(
translation_domain=DOMAIN,

View File

@@ -0,0 +1,33 @@
"""Diagnostics support for airOS."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .coordinator import AirOSConfigEntry
IP_REDACT = ["addr", "ipaddr", "ip6addr", "lastip"] # IP related
HW_REDACT = ["apmac", "hwaddr", "mac"] # MAC address
TO_REDACT_HA = [CONF_HOST, CONF_PASSWORD]
TO_REDACT_AIROS = [
"hostname", # Prevent leaking device naming
"essid", # Network SSID
"lat", # GPS latitude to prevent exposing location data.
"lon", # GPS longitude to prevent exposing location data.
*HW_REDACT,
*IP_REDACT,
]
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: AirOSConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
return {
"entry_data": async_redact_data(entry.data, TO_REDACT_HA),
"data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.2.1"]
"requirements": ["airos==0.3.0"]
}

View File

@@ -41,7 +41,7 @@ rules:
# Gold
devices: done
diagnostics: todo
diagnostics: done
discovery-update-info: todo
discovery: todo
docs-data-update: done
@@ -54,9 +54,7 @@ rules:
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: todo
comment: prepared binary_sensors will provide this
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations:

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
import logging
from airos.data import NetRole, WirelessMode
from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -19,6 +19,8 @@ from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS,
UnitOfDataRate,
UnitOfFrequency,
UnitOfLength,
UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -29,8 +31,11 @@ from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
NETROLE_OPTIONS = [mode.value for mode in NetRole]
WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
@@ -46,6 +51,7 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
translation_key="host_cpuload",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=1,
value_fn=lambda data: data.host.cpuload,
entity_registry_enabled_default=False,
),
@@ -69,13 +75,6 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
translation_key="wireless_essid",
value_fn=lambda data: data.wireless.essid,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.wireless.mode.value.replace("-", "_").lower(),
options=WIRELESS_MODE_OPTIONS,
),
AirOSSensorEntityDescription(
key="wireless_antenna_gain",
translation_key="wireless_antenna_gain",
@@ -90,6 +89,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.tx,
),
AirOSSensorEntityDescription(
@@ -98,6 +99,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.throughput.rx,
),
AirOSSensorEntityDescription(
@@ -106,6 +109,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.dl_capacity,
),
AirOSSensorEntityDescription(
@@ -114,8 +119,45 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
AirOSSensorEntityDescription(
key="host_uptime",
translation_key="host_uptime",
native_unit_of_measurement=UnitOfTime.SECONDS,
device_class=SensorDeviceClass.DURATION,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda data: data.host.uptime,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_distance",
translation_key="wireless_distance",
native_unit_of_measurement=UnitOfLength.METERS,
device_class=SensorDeviceClass.DISTANCE,
suggested_display_precision=1,
suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
value_fn=lambda data: data.wireless.distance,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.mode.value,
options=WIRELESS_MODE_OPTIONS,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_role",
translation_key="wireless_role",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.role.value,
options=WIRELESS_ROLE_OPTIONS,
entity_registry_enabled_default=False,
),
)

View File

@@ -26,6 +26,23 @@
}
},
"entity": {
"binary_sensor": {
"port_forwarding": {
"name": "Port forwarding"
},
"dhcp_client": {
"name": "DHCP client"
},
"dhcp_server": {
"name": "DHCP server"
},
"dhcp6_server": {
"name": "DHCPv6 server"
},
"pppoe": {
"name": "PPPoE link"
}
},
"sensor": {
"host_cpuload": {
"name": "CPU load"
@@ -43,13 +60,6 @@
"wireless_essid": {
"name": "Wireless SSID"
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"ap_ptp": "Access point",
"sta_ptp": "Station"
}
},
"wireless_antenna_gain": {
"name": "Antenna gain"
},
@@ -67,6 +77,26 @@
},
"wireless_remote_hostname": {
"name": "Remote hostname"
},
"host_uptime": {
"name": "Uptime"
},
"wireless_distance": {
"name": "Wireless distance"
},
"wireless_role": {
"name": "Wireless role",
"state": {
"access_point": "Access point",
"station": "Station"
}
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"point_to_point": "Point-to-point",
"point_to_multipoint": "Point-to-multipoint"
}
}
}
},

View File

@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator
PLATFORMS: list[Platform] = [Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
AirQConfigEntry = ConfigEntry[AirQCoordinator]

View File

@@ -75,6 +75,7 @@ class AirQCoordinator(DataUpdateCoordinator):
return_average=self.return_average,
clip_negative_values=self.clip_negative,
)
data["brightness"] = await self.airq.get_current_brightness()
if warming_up_sensors := identify_warming_up_sensors(data):
_LOGGER.debug(
"Following sensors are still warming up: %s", warming_up_sensors

View File

@@ -0,0 +1,85 @@
"""Definition of air-Q number platform used to control the LED strips."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from aioairq.core import AirQ
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class AirQBrightnessDescription(NumberEntityDescription):
"""Describes AirQ number entity responsible for brightness control."""
value: Callable[[dict], float]
set_value: Callable[[AirQ, float], Awaitable[None]]
AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
key="airq_led_brightness",
translation_key="airq_led_brightness",
native_min_value=0.0,
native_max_value=100.0,
native_step=1.0,
native_unit_of_measurement=PERCENTAGE,
value=lambda data: data["brightness"],
set_value=lambda device, value: device.set_current_brightness(value),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirQConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up number entities: a single entity for the LEDs."""
coordinator = entry.runtime_data
entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
async_add_entities(entities)
class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
"""Representation of the LEDs from a single AirQ."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AirQCoordinator,
description: AirQBrightnessDescription,
) -> None:
"""Initialize a single sensor."""
super().__init__(coordinator)
self.entity_description: AirQBrightnessDescription = description
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
@property
def native_value(self) -> float:
"""Return the brightness of the LEDs in %."""
return self.entity_description.value(self.coordinator.data)
async def async_set_native_value(self, value: float) -> None:
"""Set the brightness of the LEDs to the value in %."""
_LOGGER.debug(
"Changing LED brighntess from %.0f%% to %.0f%%",
self.coordinator.data["brightness"],
value,
)
await self.entity_description.set_value(self.coordinator.airq, value)
await self.coordinator.async_request_refresh()

View File

@@ -35,6 +35,11 @@
}
},
"entity": {
"number": {
"airq_led_brightness": {
"name": "LED brightness"
}
},
"sensor": {
"acetaldehyde": {
"name": "Acetaldehyde"

View File

@@ -7,21 +7,18 @@ import logging
from airthings import Airthings
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_SECRET
from .coordinator import AirthingsDataUpdateCoordinator
from .coordinator import AirthingsConfigEntry, AirthingsDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[Platform] = [Platform.SENSOR]
SCAN_INTERVAL = timedelta(minutes=6)
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
"""Set up Airthings from a config entry."""
@@ -31,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
async_get_clientsession(hass),
)
coordinator = AirthingsDataUpdateCoordinator(hass, airthings)
coordinator = AirthingsDataUpdateCoordinator(hass, airthings, entry)
await coordinator.async_config_entry_first_refresh()

View File

@@ -5,6 +5,7 @@ import logging
from airthings import Airthings, AirthingsDevice, AirthingsError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -13,15 +14,23 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(minutes=6)
type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
"""Coordinator for Airthings data updates."""
def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
def __init__(
self,
hass: HomeAssistant,
airthings: Airthings,
config_entry: AirthingsConfigEntry,
) -> None:
"""Initialize the coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_method=self._update_method,
update_interval=SCAN_INTERVAL,

View File

@@ -9,7 +9,6 @@ DOMAIN: Final = "amberelectric"
CONF_SITE_NAME = "site_name"
CONF_SITE_ID = "site_id"
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
ATTR_CHANNEL_TYPE = "channel_type"
ATTRIBUTION = "Data provided by Amber Electric"

View File

@@ -4,6 +4,7 @@ from amberelectric.models.channel import ChannelType
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import (
HomeAssistant,
ServiceCall,
@@ -16,7 +17,6 @@ from homeassistant.util.json import JsonValueType
from .const import (
ATTR_CHANNEL_TYPE,
ATTR_CONFIG_ENTRY_ID,
CONTROLLED_LOAD_CHANNEL,
DOMAIN,
FEED_IN_CHANNEL,

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from aioambient.util import get_public_device_id
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, EntityDescription
@@ -37,6 +37,7 @@ class AmbientWeatherEntity(Entity):
identifiers={(DOMAIN, mac_address)},
manufacturer="Ambient Weather",
name=station_name.capitalize(),
connections={(CONNECTION_NETWORK_MAC, mac_address)},
)
self._attr_unique_id = f"{mac_address}_{description.key}"

View File

@@ -390,7 +390,6 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return the devices payload."""
integrations_without_model_id: set[str] = set()
devices: list[dict[str, Any]] = []
dev_reg = dr.async_get(hass)
# Devices that need via device info set
@@ -400,10 +399,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations = set()
for device in dev_reg.devices.values():
# Ignore services
if device.entry_type:
continue
if not device.primary_config_entry:
continue
@@ -414,13 +409,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations.add(config_entry.domain)
if not device.model_id:
integrations_without_model_id.add(config_entry.domain)
continue
if not device.manufacturer:
continue
new_indexes[device.id] = len(devices)
devices.append(
{
@@ -430,11 +418,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
"model": device.model,
"sw_version": device.sw_version,
"hw_version": device.hw_version,
"has_suggested_area": device.suggested_area is not None,
"has_configuration_url": device.configuration_url is not None,
"via_device": None,
"entry_type": device.entry_type.value if device.entry_type else None,
}
)
if device.via_device_id:
via_devices[device.id] = device.via_device_id
@@ -454,15 +443,11 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
for device_info in devices:
if integration := integrations.get(device_info["integration"]):
device_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
device_info["custom_integration_version"] = str(integration.version)
return {
"version": "home-assistant:1",
"no_model_id": sorted(
[
domain
for domain in integrations_without_model_id
if domain in integrations and integrations[domain].is_built_in
]
),
"devices": devices,
}

View File

@@ -30,10 +30,9 @@ class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
cam: PyDroidIPCam,
) -> None:
"""Initialize the Android IP Webcam."""
self.hass = hass
self.cam = cam
super().__init__(
self.hass,
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} {config_entry.data[CONF_HOST]}",

View File

@@ -81,11 +81,15 @@ async def async_update_options(
async def async_migrate_integration(hass: HomeAssistant) -> None:
"""Migrate integration entry structure."""
entries = hass.config_entries.async_entries(DOMAIN)
# Make sure we get enabled config entries first
entries = sorted(
hass.config_entries.async_entries(DOMAIN),
key=lambda e: e.disabled_by is not None,
)
if not any(entry.version == 1 for entry in entries):
return
api_keys_entries: dict[str, ConfigEntry] = {}
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
@@ -99,30 +103,61 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
)
if entry.data[CONF_API_KEY] not in api_keys_entries:
use_existing = True
api_keys_entries[entry.data[CONF_API_KEY]] = entry
all_disabled = all(
e.disabled_by is not None
for e in entries
if e.data[CONF_API_KEY] == entry.data[CONF_API_KEY]
)
api_keys_entries[entry.data[CONF_API_KEY]] = (entry, all_disabled)
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
parent_entry, all_disabled = api_keys_entries[entry.data[CONF_API_KEY]]
hass.config_entries.async_add_subentry(parent_entry, subentry)
conversation_entity = entity_registry.async_get_entity_id(
conversation_entity_id = entity_registry.async_get_entity_id(
"conversation",
DOMAIN,
entry.entry_id,
)
if conversation_entity is not None:
entity_registry.async_update_entity(
conversation_entity,
config_entry_id=parent_entry.entry_id,
config_subentry_id=subentry.subentry_id,
new_unique_id=subentry.subentry_id,
)
device = device_registry.async_get_device(
identifiers={(DOMAIN, entry.entry_id)}
)
if conversation_entity_id is not None:
conversation_entity_entry = entity_registry.entities[conversation_entity_id]
entity_disabled_by = conversation_entity_entry.disabled_by
if (
entity_disabled_by is er.RegistryEntryDisabler.CONFIG_ENTRY
and not all_disabled
):
# Device and entity registries don't update the disabled_by flag
# when moving a device or entity from one config entry to another,
# so we need to do it manually.
entity_disabled_by = (
er.RegistryEntryDisabler.DEVICE
if device
else er.RegistryEntryDisabler.USER
)
entity_registry.async_update_entity(
conversation_entity_id,
config_entry_id=parent_entry.entry_id,
config_subentry_id=subentry.subentry_id,
disabled_by=entity_disabled_by,
new_unique_id=subentry.subentry_id,
)
if device is not None:
# Device and entity registries don't update the disabled_by flag when
# moving a device or entity from one config entry to another, so we
# need to do it manually.
device_disabled_by = device.disabled_by
if (
device.disabled_by is dr.DeviceEntryDisabler.CONFIG_ENTRY
and not all_disabled
):
device_disabled_by = dr.DeviceEntryDisabler.USER
device_registry.async_update_device(
device.id,
disabled_by=device_disabled_by,
new_identifiers={(DOMAIN, subentry.subentry_id)},
add_config_subentry_id=subentry.subentry_id,
add_config_entry_id=parent_entry.entry_id,
@@ -147,7 +182,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
title=DEFAULT_CONVERSATION_NAME,
options={},
version=2,
minor_version=2,
minor_version=3,
)
@@ -173,6 +208,38 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AnthropicConfigEntry)
hass.config_entries.async_update_entry(entry, minor_version=2)
if entry.version == 2 and entry.minor_version == 2:
# Fix migration where the disabled_by flag was not set correctly.
# We can currently only correct this for enabled config entries,
# because migration does not run for disabled config entries. This
# is asserted in tests, and if that behavior is changed, we should
# correct also disabled config entries.
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
devices = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
entity_entries = er.async_entries_for_config_entry(
entity_registry, entry.entry_id
)
if entry.disabled_by is None:
# If the config entry is not disabled, we need to set the disabled_by
# flag on devices to USER, and on entities to DEVICE, if they are set
# to CONFIG_ENTRY.
for device in devices:
if device.disabled_by is not dr.DeviceEntryDisabler.CONFIG_ENTRY:
continue
device_registry.async_update_device(
device.id,
disabled_by=dr.DeviceEntryDisabler.USER,
)
for entity in entity_entries:
if entity.disabled_by is not er.RegistryEntryDisabler.CONFIG_ENTRY:
continue
entity_registry.async_update_entity(
entity.entity_id,
disabled_by=er.RegistryEntryDisabler.DEVICE,
)
hass.config_entries.async_update_entry(entry, minor_version=3)
LOGGER.debug(
"Migration to version %s:%s successful", entry.version, entry.minor_version
)

View File

@@ -75,7 +75,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anthropic."""
VERSION = 2
MINOR_VERSION = 2
MINOR_VERSION = 3
async def async_step_user(
self, user_input: dict[str, Any] | None = None

View File

@@ -20,10 +20,8 @@ RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
THINKING_MODELS = [
"claude-3-7-sonnet-20250219",
"claude-3-7-sonnet-latest",
"claude-opus-4-20250514",
"claude-opus-4-0",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet",
"claude-sonnet-4-0",
"claude-opus-4-0",
"claude-opus-4-1",
]

View File

@@ -2,11 +2,10 @@
from collections.abc import AsyncGenerator, Callable, Iterable
import json
from typing import Any, cast
from typing import Any
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
MessageDeltaUsage,
@@ -17,7 +16,6 @@ from anthropic.types import (
RawContentBlockStopEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
@@ -35,6 +33,7 @@ from anthropic.types import (
ToolUseBlockParam,
Usage,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
from voluptuous_openapi import convert
from homeassistant.components import conversation
@@ -129,6 +128,28 @@ def _convert_content(
)
)
if isinstance(content.native, ThinkingBlock):
messages[-1]["content"].append( # type: ignore[union-attr]
ThinkingBlockParam(
type="thinking",
thinking=content.thinking_content or "",
signature=content.native.signature,
)
)
elif isinstance(content.native, RedactedThinkingBlock):
redacted_thinking_block = RedactedThinkingBlockParam(
type="redacted_thinking",
data=content.native.data,
)
if isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
redacted_thinking_block,
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
redacted_thinking_block
)
if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
@@ -152,10 +173,9 @@ def _convert_content(
return messages
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
async def _transform_stream(
chat_log: conversation.ChatLog,
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
stream: AsyncStream[MessageStreamEvent],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
@@ -186,31 +206,25 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
Each message could contain multiple blocks of the same type.
"""
if result is None:
if stream is None:
raise TypeError("Expected a stream of messages")
current_message: MessageParam | None = None
current_block: (
TextBlockParam
| ToolUseBlockParam
| ThinkingBlockParam
| RedactedThinkingBlockParam
| None
) = None
current_tool_block: ToolUseBlockParam | None = None
current_tool_args: str
input_usage: Usage | None = None
has_content = False
has_native = False
async for response in result:
async for response in stream:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
input_usage = response.message.usage
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_block = ToolUseBlockParam(
current_tool_block = ToolUseBlockParam(
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
@@ -218,75 +232,64 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
)
current_tool_args = ""
elif isinstance(response.content_block, TextBlock):
current_block = TextBlockParam(
type="text", text=response.content_block.text
)
yield {"role": "assistant"}
if has_content:
yield {"role": "assistant"}
has_native = False
has_content = True
if response.content_block.text:
yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock):
current_block = ThinkingBlockParam(
type="thinking",
thinking=response.content_block.thinking,
signature=response.content_block.signature,
)
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
elif isinstance(response.content_block, RedactedThinkingBlock):
current_block = RedactedThinkingBlockParam(
type="redacted_thinking", data=response.content_block.data
)
LOGGER.debug(
"Some of Claudes internal reasoning has been automatically "
"encrypted for safety reasons. This doesnt affect the quality of "
"responses"
)
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
yield {"native": response.content_block}
has_native = True
elif isinstance(response, RawContentBlockDeltaEvent):
if current_block is None:
raise ValueError("Unexpected delta without a block")
if isinstance(response.delta, InputJSONDelta):
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
text_block = cast(TextBlockParam, current_block)
text_block["text"] += response.delta.text
yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["thinking"] += response.delta.thinking
yield {"thinking_content": response.delta.thinking}
elif isinstance(response.delta, SignatureDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["signature"] += response.delta.signature
yield {
"native": ThinkingBlock(
type="thinking",
thinking="",
signature=response.delta.signature,
)
}
has_native = True
elif isinstance(response, RawContentBlockStopEvent):
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
# tool block
if current_tool_block is not None:
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_block["input"] = tool_args
current_tool_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=current_block["id"],
tool_name=current_block["name"],
id=current_tool_block["id"],
tool_name=current_tool_block["name"],
tool_args=tool_args,
)
]
}
elif current_block["type"] == "thinking":
# thinking block
LOGGER.debug("Thinking: %s", current_block["thinking"])
if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
current_tool_block = None
elif isinstance(response, RawMessageDeltaEvent):
if (usage := response.usage) is not None:
chat_log.async_trace(_create_token_stats(input_usage, usage))
if response.delta.stop_reason == "refusal":
raise HomeAssistantError("Potential policy violation detected")
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
def _create_token_stats(
@@ -351,45 +354,48 @@ class AnthropicBaseLLMEntity(Entity):
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_args = MessageCreateParamsStreaming(
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
stream=True,
)
if tools:
model_args["tools"] = tools
if (
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
model_args = {
"model": model,
"messages": messages,
"tools": tools or NOT_GIVEN,
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
"system": system.content,
"stream": True,
}
if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
try:
stream = await client.messages.create(**model_args)
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream),
)
]
)
)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream, messages),
)
if not isinstance(content, conversation.AssistantContent)
]
)
)
if not chat_log.unresponded_tool_results:
break
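Note: the diff above contains both an exact membership test (`model in THINKING_MODELS`) and a prefix match (`model.startswith(tuple(THINKING_MODELS))`). A minimal sketch of how the two differ is below; the THINKING_MODELS values are hypothetical placeholders, not the integration's actual constant.

# Hypothetical prefixes, for illustration only.
THINKING_MODELS = ("claude-3-7-sonnet", "claude-sonnet-4")

model = "claude-sonnet-4-20250514"
print(model in THINKING_MODELS)                  # False: exact match misses dated releases
print(model.startswith(tuple(THINKING_MODELS)))  # True: prefix match covers them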

View File

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.52.0"]
"requirements": ["anthropic==0.62.0"]
}

View File

@@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
"iot_class": "local_polling",
"loggers": ["apcaccess"],
"quality_scale": "bronze",
"requirements": ["aioapcaccess==0.4.2"]
}

View File

@@ -0,0 +1,93 @@
rules:
# Bronze
action-setup: done
appropriate-polling: done
brands: done
common-modules:
status: done
comment: |
Consider deriving a base entity.
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
The integration does not provide any actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
The integration does not provide any actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: |
The integration does not provide any additional options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
The integration does not require authentication.
test-coverage:
status: todo
comment: |
Patch `aioapcaccess.request_status` where we use it.
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
This integration cannot be discovered.
discovery:
status: exempt
comment: |
This integration cannot be discovered.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
The integration connects to a single service per configuration entry.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt
comment: |
This integration connects to a single service per configuration entry.
# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: |
The integration does not connect via HTTP.
strict-typing: done

View File

@@ -14,7 +14,22 @@
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]"
},
"data_description": {
"host": "The hostname or IP address of the APC UPS Daemon",
"port": "The port the APC UPS Daemon is listening on"
},
"description": "Enter the host and port on which the apcupsd NIS is being served."
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]"
},
"data_description": {
"host": "[%key:component::apcupsd::config::step::user::data_description::host%]",
"port": "[%key:component::apcupsd::config::step::user::data_description::port%]"
},
"description": "[%key:component::apcupsd::config::step::user::description%]"
}
}
},

View File

@@ -11,7 +11,7 @@ import time
from typing import Any, Literal, final
from hassil import Intents, recognize
from hassil.expression import Expression, ListReference, Sequence
from hassil.expression import Expression, Group, ListReference
from hassil.intents import WildcardSlotList
from homeassistant.components import conversation, media_source, stt, tts
@@ -413,7 +413,7 @@ class AssistSatelliteEntity(entity.Entity):
for intent in intents.intents.values():
for intent_data in intent.data:
for sentence in intent_data.sentences:
_collect_list_references(sentence, wildcard_names)
_collect_list_references(sentence.expression, wildcard_names)
for wildcard_name in wildcard_names:
intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -727,9 +727,9 @@ class AssistSatelliteEntity(entity.Entity):
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
if isinstance(expression, Sequence):
seq: Sequence = expression
for item in seq.items:
if isinstance(expression, Group):
grp: Group = expression
for item in grp.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3"]
"requirements": ["hassil==3.1.0"]
}

View File

@@ -5,15 +5,16 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections import namedtuple
from collections.abc import Awaitable, Callable, Coroutine
from datetime import datetime
import functools
import logging
from typing import Any, cast
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
from aiohttp import ClientSession
from pyasuswrt import AsusWrtError, AsusWrtHttp
from pyasuswrt.exceptions import AsusWrtNotAvailableInfoError
from asusrouter import AsusRouter, AsusRouterError
from asusrouter.modules.client import AsusClient
from asusrouter.modules.data import AsusData
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
from homeassistant.const import (
CONF_HOST,
@@ -41,14 +42,13 @@ from .const import (
PROTOCOL_HTTPS,
PROTOCOL_TELNET,
SENSORS_BYTES,
SENSORS_CPU,
SENSORS_LOAD_AVG,
SENSORS_MEMORY,
SENSORS_RATES,
SENSORS_TEMPERATURES,
SENSORS_TEMPERATURES_LEGACY,
SENSORS_UPTIME,
)
from .helpers import clean_dict, translate_to_legacy
SENSORS_TYPE_BYTES = "sensors_bytes"
SENSORS_TYPE_COUNT = "sensors_count"
@@ -310,16 +310,16 @@ class AsusWrtHttpBridge(AsusWrtBridge):
def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
"""Initialize Bridge that use HTTP library."""
super().__init__(conf[CONF_HOST])
self._api: AsusWrtHttp = self._get_api(conf, session)
self._api = self._get_api(conf, session)
@staticmethod
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
"""Get the AsusWrtHttp API."""
return AsusWrtHttp(
conf[CONF_HOST],
conf[CONF_USERNAME],
conf.get(CONF_PASSWORD, ""),
use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
"""Get the AsusRouter API."""
return AsusRouter(
hostname=conf[CONF_HOST],
username=conf[CONF_USERNAME],
password=conf.get(CONF_PASSWORD, ""),
use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
port=conf.get(CONF_PORT),
session=session,
)
@@ -327,46 +327,90 @@ class AsusWrtHttpBridge(AsusWrtBridge):
@property
def is_connected(self) -> bool:
"""Get connected status."""
return cast(bool, self._api.is_connected)
return self._api.connected
async def async_connect(self) -> None:
"""Connect to the device."""
await self._api.async_connect()
# Collect the identity
_identity = await self._api.async_get_identity()
# get main router properties
if mac := self._api.mac:
if mac := _identity.mac:
self._label_mac = format_mac(mac)
self._firmware = self._api.firmware
self._model = self._api.model
self._firmware = str(_identity.firmware)
self._model = _identity.model
async def async_disconnect(self) -> None:
"""Disconnect to the device."""
await self._api.async_disconnect()
async def _get_data(
self,
datatype: AsusData,
force: bool = False,
) -> dict[str, Any]:
"""Get data from the device.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
try:
raw = await self._api.async_get_data(datatype, force=force)
return translate_to_legacy(clean_dict(convert_to_ha_data(raw)))
except AsusRouterError as ex:
raise UpdateFailed(ex) from ex
async def _get_sensors(self, datatype: AsusData) -> list[str]:
"""Get the available sensors.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
sensors = []
try:
data = await self._api.async_get_data(datatype)
# Get the list of sensors from the raw data
# and translate into the legacy format
sensors = translate_to_legacy(convert_to_ha_sensors(data, datatype))
_LOGGER.debug("Available `%s` sensors: %s", datatype.value, sensors)
except AsusRouterError as ex:
_LOGGER.warning(
"Cannot get available `%s` sensors with exception: %s",
datatype.value,
ex,
)
return sensors
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
"""Get list of connected devices."""
api_devices = await self._api.async_get_connected_devices()
api_devices: dict[str, AsusClient] = await self._api.async_get_data(
AsusData.CLIENTS, force=True
)
return {
format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
format_mac(mac): WrtDevice(
dev.connection.ip_address, dev.description.name, dev.connection.node
)
for mac, dev in api_devices.items()
if dev.connection is not None
and dev.description is not None
and dev.connection.ip_address is not None
}
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
"""Return a dictionary of available sensors for this bridge."""
sensors_cpu = await self._get_available_cpu_sensors()
sensors_temperatures = await self._get_available_temperature_sensors()
sensors_loadavg = await self._get_loadavg_sensors_availability()
return {
SENSORS_TYPE_BYTES: {
KEY_SENSORS: SENSORS_BYTES,
KEY_METHOD: self._get_bytes,
},
SENSORS_TYPE_CPU: {
KEY_SENSORS: sensors_cpu,
KEY_SENSORS: await self._get_sensors(AsusData.CPU),
KEY_METHOD: self._get_cpu_usage,
},
SENSORS_TYPE_LOAD_AVG: {
KEY_SENSORS: sensors_loadavg,
KEY_SENSORS: await self._get_sensors(AsusData.SYSINFO),
KEY_METHOD: self._get_load_avg,
},
SENSORS_TYPE_MEMORY: {
@@ -382,95 +426,44 @@ class AsusWrtHttpBridge(AsusWrtBridge):
KEY_METHOD: self._get_uptime,
},
SENSORS_TYPE_TEMPERATURES: {
KEY_SENSORS: sensors_temperatures,
KEY_SENSORS: await self._get_sensors(AsusData.TEMPERATURE),
KEY_METHOD: self._get_temperatures,
},
}
async def _get_available_cpu_sensors(self) -> list[str]:
"""Check which cpu information is available on the router."""
try:
available_cpu = await self._api.async_get_cpu_usage()
available_sensors = [t for t in SENSORS_CPU if t in available_cpu]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking cpu sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_available_temperature_sensors(self) -> list[str]:
"""Check which temperature information is available on the router."""
try:
available_temps = await self._api.async_get_temperatures()
available_sensors = [
t for t in SENSORS_TEMPERATURES if t in available_temps
]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking temperature sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_loadavg_sensors_availability(self) -> list[str]:
"""Check if load avg is available on the router."""
try:
await self._api.async_get_loadavg()
except AsusWrtNotAvailableInfoError:
return []
except AsusWrtError:
pass
return SENSORS_LOAD_AVG
@handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
async def _get_bytes(self) -> Any:
"""Fetch byte information from the router."""
return await self._api.async_get_traffic_bytes()
return await self._get_data(AsusData.NETWORK)
@handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
async def _get_rates(self) -> Any:
"""Fetch rates information from the router."""
return await self._api.async_get_traffic_rates()
data = await self._get_data(AsusData.NETWORK)
# Convert from bits/s to Bytes/s for compatibility with legacy sensors
return {
key: (
value / 8
if key in SENSORS_RATES and isinstance(value, (int, float))
else value
)
for key, value in data.items()
}
@handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
async def _get_load_avg(self) -> Any:
"""Fetch cpu load avg information from the router."""
return await self._api.async_get_loadavg()
return await self._get_data(AsusData.SYSINFO)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_temperatures(self) -> Any:
"""Fetch temperatures information from the router."""
return await self._api.async_get_temperatures()
return await self._get_data(AsusData.TEMPERATURE)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_cpu_usage(self) -> Any:
"""Fetch cpu information from the router."""
return await self._api.async_get_cpu_usage()
return await self._get_data(AsusData.CPU)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_memory_usage(self) -> Any:
"""Fetch memory information from the router."""
return await self._api.async_get_memory_usage()
return await self._get_data(AsusData.RAM)
async def _get_uptime(self) -> dict[str, Any]:
"""Fetch uptime from the router."""
try:
uptimes = await self._api.async_get_uptime()
except AsusWrtError as exc:
raise UpdateFailed(exc) from exc
last_boot = datetime.fromisoformat(uptimes["last_boot"])
uptime = uptimes["uptime"]
return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False))
return await self._get_data(AsusData.BOOTTIME)
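A worked sketch of the bits/s to Bytes/s conversion performed in _get_rates above; the SENSORS_RATES keys here are assumed for illustration and are not necessarily the integration's constant.

SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"]  # assumed keys, illustration only
data = {"sensor_rx_rates": 80_000_000, "sensor_tx_rates": 8_000_000, "uptime": 42}
converted = {
    key: value / 8 if key in SENSORS_RATES and isinstance(value, (int, float)) else value
    for key, value in data.items()
}
print(converted)  # {'sensor_rx_rates': 10000000.0, 'sensor_tx_rates': 1000000.0, 'uptime': 42}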

View File

@@ -7,7 +7,7 @@ import os
import socket
from typing import Any, cast
from pyasuswrt import AsusWrtError
from asusrouter import AsusRouterError
import voluptuous as vol
from homeassistant.components.device_tracker import (
@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
try:
await api.async_connect()
except (AsusWrtError, OSError):
except (AsusRouterError, OSError):
_LOGGER.error(
"Error connecting to the AsusWrt router at %s using protocol %s",
host,

View File

@@ -0,0 +1,56 @@
"""Helpers for AsusWRT integration."""
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
"wan_tx": "sensor_tx_bytes",
"total_usage": "cpu_total_usage",
"usage": "mem_usage_perc",
"free": "mem_free",
"used": "mem_used",
"wan_rx_speed": "sensor_rx_rates",
"wan_tx_speed": "sensor_tx_rates",
"2ghz": "2.4GHz",
"5ghz": "5.0GHz",
"5ghz2": "5.0GHz_2",
"6ghz": "6.0GHz",
"cpu": "CPU",
"datetime": "sensor_last_boot",
"uptime": "sensor_uptime",
**{f"{num}_usage": f"cpu{num}_usage" for num in range(1, 9)},
**{f"load_avg_{load}": f"sensor_load_avg{load}" for load in ("1", "5", "15")},
}
def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
"""Cleans dictionary from None values.
The `state` key is always preserved regardless of its value.
"""
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:
return None
if isinstance(raw, dict):
return {TRANSLATION_MAP.get(k, k): v for k, v in raw.items()}
if isinstance(raw, list):
return [
TRANSLATION_MAP[item]
if isinstance(item, str) and item in TRANSLATION_MAP
else item
for item in raw
]
return raw
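A small usage sketch of the two helpers above; the input values are made up for illustration.

raw = {"wan_rx": 1024, "usage": 37.5, "cpu_state": None, "unknown": None}
cleaned = clean_dict(raw)
# -> {"wan_rx": 1024, "usage": 37.5, "cpu_state": None}  ("unknown" dropped, *state keys kept)
legacy = translate_to_legacy(cleaned)
# -> {"sensor_rx_bytes": 1024, "mem_usage_perc": 37.5, "cpu_state": None}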

View File

@@ -1,11 +1,11 @@
{
"domain": "asuswrt",
"name": "ASUSWRT",
"codeowners": ["@kennedyshead", "@ollo69"],
"codeowners": ["@kennedyshead", "@ollo69", "@Vaskivskyi"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.21"]
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.19.0"]
}

View File

@@ -5,9 +5,9 @@ from __future__ import annotations
from collections.abc import Callable, Mapping
from datetime import datetime, timedelta
import logging
from typing import Any
from typing import TYPE_CHECKING, Any
from pyasuswrt import AsusWrtError
from asusrouter import AsusRouterError
from homeassistant.components.device_tracker import (
CONF_CONSIDER_HOME,
@@ -40,6 +40,9 @@ from .const import (
SENSORS_CONNECTED_DEVICE,
)
if TYPE_CHECKING:
from . import AsusWrtConfigEntry
CONF_REQ_RELOAD = [CONF_DNSMASQ, CONF_INTERFACE, CONF_REQUIRE_IP]
SCAN_INTERVAL = timedelta(seconds=30)
@@ -52,10 +55,13 @@ _LOGGER = logging.getLogger(__name__)
class AsusWrtSensorDataHandler:
"""Data handler for AsusWrt sensor."""
def __init__(self, hass: HomeAssistant, api: AsusWrtBridge) -> None:
def __init__(
self, hass: HomeAssistant, api: AsusWrtBridge, entry: AsusWrtConfigEntry
) -> None:
"""Initialize a AsusWrt sensor data handler."""
self._hass = hass
self._api = api
self._entry = entry
self._connected_devices = 0
async def _get_connected_devices(self) -> dict[str, int]:
@@ -91,6 +97,7 @@ class AsusWrtSensorDataHandler:
update_method=method,
# Polling interval. Will only be polled if there are subscribers.
update_interval=SCAN_INTERVAL if should_poll else None,
config_entry=self._entry,
)
await coordinator.async_refresh()
@@ -222,7 +229,7 @@ class AsusWrtRouter:
"""Set up a AsusWrt router."""
try:
await self._api.async_connect()
except (AsusWrtError, OSError) as exc:
except (AsusRouterError, OSError) as exc:
raise ConfigEntryNotReady from exc
if not self._api.is_connected:
raise ConfigEntryNotReady
@@ -277,7 +284,7 @@ class AsusWrtRouter:
_LOGGER.debug("Checking devices for ASUS router %s", self.host)
try:
wrt_devices = await self._api.async_get_connected_devices()
except (OSError, AsusWrtError) as exc:
except (OSError, AsusRouterError) as exc:
if not self._connect_error:
self._connect_error = True
_LOGGER.error(
@@ -321,7 +328,9 @@ class AsusWrtRouter:
if self._sensors_data_handler:
return
self._sensors_data_handler = AsusWrtSensorDataHandler(self.hass, self._api)
self._sensors_data_handler = AsusWrtSensorDataHandler(
self.hass, self._api, self._entry
)
self._sensors_data_handler.update_device_count(self._connected_devices)
sensors_types = await self._api.async_get_available_sensors()

View File

@@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.10.0", "yalexs-ble==3.1.0"]
"requirements": ["yalexs==8.11.1", "yalexs-ble==3.1.2"]
}

View File

@@ -268,7 +268,7 @@ class LoginFlowBaseView(HomeAssistantView):
result.pop("data")
result.pop("context")
result_obj: Credentials = result.pop("result")
result_obj = result.pop("result")
# Result can be None if credential was never linked to a user before.
user = await hass.auth.async_get_user_by_credentials(result_obj)
@@ -281,7 +281,8 @@ class LoginFlowBaseView(HomeAssistantView):
)
process_success_login(request)
result["result"] = self._store_result(client_id, result_obj)
# We overwrite the Credentials object with the string code used to retrieve it.
result["result"] = self._store_result(client_id, result_obj) # type: ignore[typeddict-item]
return self.json(result)

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from datetime import timedelta
import logging
API_ABS_HUMID = "abs_humid"
API_CO2 = "carbon_dioxide"
API_DEW_POINT = "dew_point"
API_DUST = "dust"

View File

@@ -18,6 +18,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_CONNECTIONS,
ATTR_SW_VERSION,
CONCENTRATION_GRAMS_PER_CUBIC_METER,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
@@ -33,6 +34,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
API_ABS_HUMID,
API_CO2,
API_DEW_POINT,
API_DUST,
@@ -120,6 +122,14 @@ SENSOR_TYPES: tuple[AwairSensorEntityDescription, ...] = (
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
AwairSensorEntityDescription(
key=API_ABS_HUMID,
device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
unique_id_tag="absolute_humidity",
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
)
SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = (

View File

@@ -29,7 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"requirements": ["axis==64"],
"requirements": ["axis==65"],
"ssdp": [
{
"manufacturer": "AXIS"

View File

@@ -127,7 +127,6 @@ class BackupConfigData:
schedule=BackupSchedule(
days=days,
recurrence=ScheduleRecurrence(data["schedule"]["recurrence"]),
state=ScheduleState(data["schedule"].get("state", ScheduleState.NEVER)),
time=time,
),
)
@@ -453,7 +452,6 @@ class StoredBackupSchedule(TypedDict):
days: list[Day]
recurrence: ScheduleRecurrence
state: ScheduleState
time: str | None
@@ -462,7 +460,6 @@ class ScheduleParametersDict(TypedDict, total=False):
days: list[Day]
recurrence: ScheduleRecurrence
state: ScheduleState
time: dt.time | None
@@ -486,32 +483,12 @@ class ScheduleRecurrence(StrEnum):
CUSTOM_DAYS = "custom_days"
class ScheduleState(StrEnum):
"""Represent the schedule recurrence.
This is deprecated and can be removed in HA Core 2025.8.
"""
NEVER = "never"
DAILY = "daily"
MONDAY = "mon"
TUESDAY = "tue"
WEDNESDAY = "wed"
THURSDAY = "thu"
FRIDAY = "fri"
SATURDAY = "sat"
SUNDAY = "sun"
@dataclass(kw_only=True)
class BackupSchedule:
"""Represent the backup schedule."""
days: list[Day] = field(default_factory=list)
recurrence: ScheduleRecurrence = ScheduleRecurrence.NEVER
# Although no longer used, state is kept for backwards compatibility.
# It can be removed in HA Core 2025.8.
state: ScheduleState = ScheduleState.NEVER
time: dt.time | None = None
cron_event: CronSim | None = field(init=False, default=None)
next_automatic_backup: datetime | None = field(init=False, default=None)
@@ -610,7 +587,6 @@ class BackupSchedule:
return StoredBackupSchedule(
days=self.days,
recurrence=self.recurrence,
state=self.state,
time=self.time.isoformat() if self.time else None,
)

View File

@@ -1119,7 +1119,7 @@ class BackupManager:
)
if unavailable_agents:
LOGGER.warning(
"Backup agents %s are not available, will backupp to %s",
"Backup agents %s are not available, will backup to %s",
unavailable_agents,
available_agents,
)

View File

@@ -331,9 +331,6 @@ async def handle_config_info(
"""Send the stored backup config."""
manager = hass.data[DATA_MANAGER]
config = manager.config.data.to_dict()
# Remove state from schedule, it's not needed in the frontend
# mypy doesn't like deleting from TypedDict, ignore it
del config["schedule"]["state"] # type: ignore[misc]
connection.send_result(
msg["id"],
{

View File

@@ -25,7 +25,6 @@ SERVICE_TRIGGER = "trigger_camera"
SERVICE_SAVE_VIDEO = "save_video"
SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
SERVICE_SEND_PIN = "send_pin"
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,

View File

@@ -5,12 +5,12 @@ from __future__ import annotations
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_PIN
from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_PIN
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN
from .const import DOMAIN, SERVICE_SEND_PIN
from .coordinator import BlinkConfigEntry
SERVICE_SEND_PIN_SCHEMA = vol.Schema(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.2.4"]
"requirements": ["bluecurrent-api==1.3.1"]
}

View File

@@ -388,12 +388,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
mode = BluetoothScanningMode.PASSIVE if passive else BluetoothScanningMode.ACTIVE
scanner = HaScanner(mode, adapter, address)
scanner.async_setup()
try:
await scanner.async_start()
except (RuntimeError, ScannerStartError) as err:
raise ConfigEntryNotReady(
f"{adapter_human_name(adapter, address)}: {err}"
) from err
adapters = await manager.async_get_bluetooth_adapters()
details = adapters[adapter]
if entry.title == address:
@@ -401,8 +395,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry, title=adapter_title(adapter, details)
)
slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS
# Register the scanner before starting so
# any raw advertisement data can be processed
entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots))
await async_update_device(hass, entry, adapter, details)
try:
await scanner.async_start()
except (RuntimeError, ScannerStartError) as err:
raise ConfigEntryNotReady(
f"{adapter_human_name(adapter, address)}: {err}"
) from err
entry.async_on_unload(entry.add_update_listener(async_update_listener))
entry.async_on_unload(scanner.async_stop)
return True

View File

@@ -235,10 +235,9 @@ class HomeAssistantBluetoothManager(BluetoothManager):
def _async_save_scanner_history(self, scanner: BaseHaScanner) -> None:
"""Save the scanner history."""
if isinstance(scanner, BaseHaRemoteScanner):
self.storage.async_set_advertisement_history(
scanner.source, scanner.serialize_discovered_devices()
)
self.storage.async_set_advertisement_history(
scanner.source, scanner.serialize_discovered_devices()
)
def _async_unregister_scanner(
self, scanner: BaseHaScanner, unregister: CALLBACK_TYPE
@@ -285,9 +284,8 @@ class HomeAssistantBluetoothManager(BluetoothManager):
connection_slots: int | None = None,
) -> CALLBACK_TYPE:
"""Register a scanner."""
if isinstance(scanner, BaseHaRemoteScanner):
if history := self.storage.async_get_advertisement_history(scanner.source):
scanner.restore_discovered_devices(history)
if history := self.storage.async_get_advertisement_history(scanner.source):
scanner.restore_discovered_devices(history)
unregister = super().async_register_scanner(scanner, connection_slots)
return partial(self._async_unregister_scanner, scanner, unregister)

View File

@@ -16,11 +16,11 @@
"quality_scale": "internal",
"requirements": [
"bleak==1.0.1",
"bleak-retry-connector==4.0.0",
"bleak-retry-connector==4.0.1",
"bluetooth-adapters==2.0.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.2",
"habluetooth==4.0.1"
"dbus-fast==2.44.3",
"habluetooth==5.0.1"
]
}

View File

@@ -39,7 +39,13 @@ def async_setup(hass: HomeAssistant) -> None:
def serialize_service_info(
service_info: BluetoothServiceInfoBleak, time_diff: float
) -> dict[str, Any]:
"""Serialize a BluetoothServiceInfoBleak object."""
"""Serialize a BluetoothServiceInfoBleak object.
The raw field is included for:
1. Debugging - to see the actual advertisement packet
2. Data freshness - manufacturer_data and service_data are aggregated
across multiple advertisements, raw shows the latest packet only
"""
return {
"name": service_info.name,
"address": service_info.address,
@@ -57,6 +63,7 @@ def serialize_service_info(
"connectable": service_info.connectable,
"time": service_info.time + time_diff,
"tx_power": service_info.tx_power,
"raw": service_info.raw.hex() if service_info.raw else None,
}
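Illustration only (the hex value below is a made-up advertisement): since raw is serialized as a hex string, the original packet bytes can be recovered for debugging with bytes.fromhex.

payload = {"raw": "0201060303aafe"}  # hypothetical serialized output
packet = bytes.fromhex(payload["raw"]) if payload["raw"] else None
print(packet)  # b'\x02\x01\x06\x03\x03\xaa\xfe'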

View File

@@ -6,4 +6,3 @@ CONF_INSTALLER_CODE = "installer_code"
CONF_USER_CODE = "user_code"
ATTR_DATETIME = "datetime"
SERVICE_SET_DATE_TIME = "set_date_time"
ATTR_CONFIG_ENTRY_ID = "config_entry_id"

View File

@@ -9,12 +9,13 @@ from typing import Any
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.util import dt as dt_util
from .const import ATTR_CONFIG_ENTRY_ID, ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
from .const import ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
from .types import BoschAlarmConfigEntry

View File

@@ -95,7 +95,7 @@
"name": "Battery missing"
},
"panel_fault_ac_fail": {
"name": "AC Failure"
"name": "AC failure"
},
"panel_fault_parameter_crc_fail_in_pif": {
"name": "CRC failure in panel configuration"

View File

@@ -69,12 +69,7 @@ class SHCEntity(SHCBaseEntity):
manufacturer=device.manufacturer,
model=device.device_model,
name=device.name,
via_device=(
DOMAIN,
device.parent_device_id
if device.parent_device_id is not None
else parent_id,
),
via_device=(DOMAIN, device.root_device_id),
)
super().__init__(device=device, parent_id=parent_id, entry_id=entry_id)

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/bosch_shc",
"iot_class": "local_push",
"loggers": ["boschshcpy"],
"requirements": ["boschshcpy==0.2.91"],
"requirements": ["boschshcpy==0.2.107"],
"zeroconf": [
{
"type": "_http._tcp.local.",

View File

@@ -53,8 +53,7 @@ async def async_setup_entry(
assert unique_id is not None
async_add_entities(
BraviaTVButton(coordinator, unique_id, config_entry.title, description)
for description in BUTTONS
BraviaTVButton(coordinator, unique_id, description) for description in BUTTONS
)
@@ -67,11 +66,10 @@ class BraviaTVButton(BraviaTVEntity, ButtonEntity):
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
description: BraviaTVButtonDescription,
) -> None:
"""Initialize the button."""
super().__init__(coordinator, unique_id, model)
super().__init__(coordinator, unique_id)
self._attr_unique_id = f"{unique_id}_{description.key}"
self.entity_description = description

View File

@@ -79,14 +79,16 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
system_info = await self.client.get_system_info()
cid = system_info[ATTR_CID].lower()
title = system_info[ATTR_MODEL]
self.device_config[CONF_MAC] = system_info[ATTR_MAC]
await self.async_set_unique_id(cid)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=title, data=self.device_config)
return self.async_create_entry(
title=f"{system_info['name']} {system_info[ATTR_MODEL]}",
data=self.device_config,
)
async def async_reauth_device(self) -> ConfigFlowResult:
"""Reauthorize Bravia TV device from config."""

View File

@@ -81,6 +81,7 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.use_psk = config_entry.data.get(CONF_USE_PSK, False)
self.client_id = config_entry.data.get(CONF_CLIENT_ID, LEGACY_CLIENT_ID)
self.nickname = config_entry.data.get(CONF_NICKNAME, NICKNAME_PREFIX)
self.system_info: dict[str, str] = {}
self.source: str | None = None
self.source_list: list[str] = []
self.source_map: dict[str, dict] = {}
@@ -150,6 +151,9 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.is_on = power_status == "active"
self.skipped_updates = 0
if not self.system_info:
self.system_info = await self.client.get_system_info()
if self.is_on is False:
return

View File

@@ -12,23 +12,16 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]):
_attr_has_entity_name = True
def __init__(
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
) -> None:
def __init__(self, coordinator: BraviaTVCoordinator, unique_id: str) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
connections={(CONNECTION_NETWORK_MAC, coordinator.system_info["macAddr"])},
manufacturer=ATTR_MANUFACTURER,
model=model,
name=f"{ATTR_MANUFACTURER} {model}",
model_id=coordinator.system_info["model"],
hw_version=coordinator.system_info["generation"],
serial_number=coordinator.system_info["serial"],
)
if coordinator.client.mac is not None:
self._attr_device_info["connections"] = {
(CONNECTION_NETWORK_MAC, coordinator.client.mac)
}

View File

@@ -34,9 +34,7 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities(
[BraviaTVMediaPlayer(coordinator, unique_id, config_entry.title)]
)
async_add_entities([BraviaTVMediaPlayer(coordinator, unique_id)])
class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):

View File

@@ -24,7 +24,7 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities([BraviaTVRemote(coordinator, unique_id, config_entry.title)])
async_add_entities([BraviaTVRemote(coordinator, unique_id)])
class BraviaTVRemote(BraviaTVEntity, RemoteEntity):

View File

@@ -64,6 +64,7 @@ class BroadlinkUpdateManager(ABC, Generic[_ApiT]):
device.hass,
_LOGGER,
name=f"{device.name} ({device.api.model} at {device.api.host[0]})",
config_entry=device.config,
update_method=self.async_update,
update_interval=self.SCAN_INTERVAL,
)

View File

@@ -2,7 +2,16 @@
import dataclasses
from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState
from bsblan import (
BSBLAN,
BSBLANAuthError,
BSBLANConfig,
BSBLANConnectionError,
BSBLANError,
Device,
Info,
StaticState,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
@@ -13,9 +22,14 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_PASSKEY
from .const import CONF_PASSKEY, DOMAIN
from .coordinator import BSBLanUpdateCoordinator
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
@@ -54,10 +68,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan)
await coordinator.async_config_entry_first_refresh()
# Fetch all required data concurrently
device = await bsblan.device()
info = await bsblan.info()
static = await bsblan.static_values()
try:
# Fetch all required data sequentially
device = await bsblan.device()
info = await bsblan.info()
static = await bsblan.static_values()
except BSBLANConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="setup_connection_error",
translation_placeholders={"host": entry.data[CONF_HOST]},
) from err
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="setup_auth_error",
) from err
except BSBLANError as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="setup_general_error",
) from err
entry.runtime_data = BSBLanData(
client=bsblan,

View File

@@ -2,9 +2,10 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from bsblan import BSBLAN, BSBLANConfig, BSBLANError
from bsblan import BSBLAN, BSBLANAuthError, BSBLANConfig, BSBLANError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -45,7 +46,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)
return await self._validate_and_create()
return await self._validate_and_create(user_input)
async def async_step_zeroconf(
self, discovery_info: ZeroconfServiceInfo
@@ -128,14 +129,29 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
self.username = user_input.get(CONF_USERNAME)
self.password = user_input.get(CONF_PASSWORD)
return await self._validate_and_create(is_discovery=True)
return await self._validate_and_create(user_input, is_discovery=True)
async def _validate_and_create(
self, is_discovery: bool = False
self, user_input: dict[str, Any], is_discovery: bool = False
) -> ConfigFlowResult:
"""Validate device connection and create entry."""
try:
await self._get_bsblan_info(is_discovery=is_discovery)
await self._get_bsblan_info()
except BSBLANAuthError:
if is_discovery:
return self.async_show_form(
step_id="discovery_confirm",
data_schema=vol.Schema(
{
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
}
),
errors={"base": "invalid_auth"},
description_placeholders={"host": str(self.host)},
)
return self._show_setup_form({"base": "invalid_auth"}, user_input)
except BSBLANError:
if is_discovery:
return self.async_show_form(
@@ -154,18 +170,137 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
return self._async_create_entry()
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauth flow."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth confirmation flow."""
existing_entry = self.hass.config_entries.async_get_entry(
self.context["entry_id"]
)
assert existing_entry
if user_input is None:
# Preserve existing values as defaults
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PASSKEY,
default=existing_entry.data.get(
CONF_PASSKEY, vol.UNDEFINED
),
): str,
vol.Optional(
CONF_USERNAME,
default=existing_entry.data.get(
CONF_USERNAME, vol.UNDEFINED
),
): str,
vol.Optional(
CONF_PASSWORD,
default=vol.UNDEFINED,
): str,
}
),
)
# Combine existing data with the user's new input for validation.
# This correctly handles adding, changing, and clearing credentials.
config_data = existing_entry.data.copy()
config_data.update(user_input)
self.host = config_data[CONF_HOST]
self.port = config_data[CONF_PORT]
self.passkey = config_data.get(CONF_PASSKEY)
self.username = config_data.get(CONF_USERNAME)
self.password = config_data.get(CONF_PASSWORD)
try:
await self._get_bsblan_info(raise_on_progress=False, is_reauth=True)
except BSBLANAuthError:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PASSKEY,
default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
): str,
vol.Optional(
CONF_USERNAME,
default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
): str,
vol.Optional(
CONF_PASSWORD,
default=vol.UNDEFINED,
): str,
}
),
errors={"base": "invalid_auth"},
)
except BSBLANError:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PASSKEY,
default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
): str,
vol.Optional(
CONF_USERNAME,
default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
): str,
vol.Optional(
CONF_PASSWORD,
default=vol.UNDEFINED,
): str,
}
),
errors={"base": "cannot_connect"},
)
# Update only the fields that were provided by the user
return self.async_update_reload_and_abort(
existing_entry, data_updates=user_input, reason="reauth_successful"
)
@callback
def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult:
def _show_setup_form(
self, errors: dict | None = None, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Show the setup form to the user."""
# Preserve user input if provided, otherwise use defaults
defaults = user_input or {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_PASSKEY): str,
vol.Optional(CONF_USERNAME): str,
vol.Optional(CONF_PASSWORD): str,
vol.Required(
CONF_HOST, default=defaults.get(CONF_HOST, vol.UNDEFINED)
): str,
vol.Optional(
CONF_PORT, default=defaults.get(CONF_PORT, DEFAULT_PORT)
): int,
vol.Optional(
CONF_PASSKEY, default=defaults.get(CONF_PASSKEY, vol.UNDEFINED)
): str,
vol.Optional(
CONF_USERNAME,
default=defaults.get(CONF_USERNAME, vol.UNDEFINED),
): str,
vol.Optional(
CONF_PASSWORD,
default=defaults.get(CONF_PASSWORD, vol.UNDEFINED),
): str,
}
),
errors=errors or {},
@@ -186,7 +321,9 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
)
async def _get_bsblan_info(
self, raise_on_progress: bool = True, is_discovery: bool = False
self,
raise_on_progress: bool = True,
is_reauth: bool = False,
) -> None:
"""Get device information from a BSBLAN device."""
config = BSBLANConfig(
@@ -209,11 +346,13 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
format_mac(self.mac), raise_on_progress=raise_on_progress
)
# Always allow updating host/port for both user and discovery flows
# This ensures connectivity is maintained when devices change IP addresses
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)
# Skip unique_id configuration check during reauth to prevent "already_configured" abort
if not is_reauth:
# Always allow updating host/port for both user and discovery flows
# This ensures connectivity is maintained when devices change IP addresses
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self.host,
CONF_PORT: self.port,
}
)

View File

@@ -4,11 +4,19 @@ from dataclasses import dataclass
from datetime import timedelta
from random import randint
from bsblan import BSBLAN, BSBLANConnectionError, HotWaterState, Sensor, State
from bsblan import (
BSBLAN,
BSBLANAuthError,
BSBLANConnectionError,
HotWaterState,
Sensor,
State,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, LOGGER, SCAN_INTERVAL
@@ -62,6 +70,10 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
state = await self.client.state()
sensor = await self.client.sensor()
dhw = await self.client.hot_water_state()
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
"Authentication failed for BSB-Lan device"
) from err
except BSBLANConnectionError as err:
host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown"
raise UpdateFailed(

View File

@@ -33,14 +33,30 @@
"username": "[%key:component::bsblan::config::step::user::data_description::username%]",
"password": "[%key:component::bsblan::config::step::user::data_description::password%]"
}
},
"reauth_confirm": {
"title": "[%key:common::config_flow::title::reauth%]",
"description": "The BSB-Lan integration needs to re-authenticate with {name}",
"data": {
"passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
"username": "[%key:common::config_flow::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
"username": "[%key:component::bsblan::config::step::user::data_description::username%]",
"password": "[%key:component::bsblan::config::step::user::data_description::password%]"
}
}
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},
"exceptions": {
@@ -55,6 +71,15 @@
},
"set_operation_mode_error": {
"message": "An error occurred while setting the operation mode"
},
"setup_connection_error": {
"message": "Failed to retrieve static device data from BSB-Lan device at {host}"
},
"setup_auth_error": {
"message": "Authentication failed while retrieving static device data"
},
"setup_general_error": {
"message": "An unknown error occurred while retrieving static device data"
}
},
"entity": {

View File

@@ -25,7 +25,7 @@
"services": {
"press": {
"name": "Press",
"description": "Press the button entity."
"description": "Presses a button entity."
}
}
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/caldav",
"iot_class": "cloud_polling",
"loggers": ["caldav", "vobject"],
"requirements": ["caldav==1.6.0", "icalendar==6.1.0"]
"requirements": ["caldav==1.6.0", "icalendar==6.3.1"]
}

View File

@@ -255,7 +255,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
)
entity_description: ClimateEntityDescription
_attr_current_humidity: int | None = None
_attr_current_humidity: float | None = None
_attr_current_temperature: float | None = None
_attr_fan_mode: str | None
_attr_fan_modes: list[str] | None

View File

@@ -100,16 +100,10 @@ set_hvac_mode:
fields:
hvac_mode:
selector:
select:
options:
- "off"
- "auto"
- "cool"
- "dry"
- "fan_only"
- "heat_cool"
- "heat"
translation_key: hvac_mode
state:
hide_states:
- unavailable
- unknown
set_swing_mode:
target:
entity:

View File

@@ -6,12 +6,16 @@ import asyncio
from collections.abc import Callable
from contextlib import suppress
from datetime import datetime, timedelta
from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
import aiohttp
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa import AlexaApiError, Cloud
from hass_nabucasa.alexa_api import (
AlexaAccessTokenDetails,
AlexaApiNeedsRelinkError,
AlexaApiNoTokenError,
)
from yarl import URL
from homeassistant.components import persistent_notification
@@ -146,7 +150,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
self._cloud_user = cloud_user
self._prefs = prefs
self._cloud = cloud
self._token = None
self._token: str | None = None
self._token_valid: datetime | None = None
self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
self._alexa_sync_unsub: Callable[[], None] | None = None
@@ -318,32 +322,31 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
async def async_get_access_token(self) -> str | None:
"""Get an access token."""
details: AlexaAccessTokenDetails | None
if self._token_valid is not None and self._token_valid > utcnow():
return self._token
resp = await cloud_api.async_alexa_access_token(self._cloud)
body = await resp.json()
try:
details = await self._cloud.alexa_api.access_token()
except AlexaApiNeedsRelinkError as exception:
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({exception.reason}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink from exception
except (AlexaApiNoTokenError, AlexaApiError) as exception:
raise alexa_errors.NoTokenAvailable from exception
if resp.status == HTTPStatus.BAD_REQUEST:
if body["reason"] in ("RefreshTokenNotFound", "UnknownRegion"):
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({body['reason']}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink
raise alexa_errors.NoTokenAvailable
self._token = body["access_token"]
self._endpoint = body["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=body["expires_in"])
self._token = details["access_token"]
self._endpoint = details["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=details["expires_in"])
return self._token
async def _async_prefs_updated(self, prefs: CloudPreferences) -> None:

View File

@@ -7,7 +7,7 @@ from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa import Cloud
from hass_nabucasa.google_report_state import ErrorResponse
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
@@ -377,7 +377,7 @@ class CloudGoogleConfig(AbstractConfig):
return HTTPStatus.OK
async with self._sync_entities_lock:
resp = await cloud_api.async_google_actions_request_sync(self._cloud)
resp = await self._cloud.google_report_state.request_sync()
return resp.status
async def async_connect_agent_user(self, agent_user_id: str) -> None:

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==0.110.0"],
"requirements": ["hass-nabucasa==1.0.0"],
"single_config_entry": true
}

View File

@@ -4,11 +4,13 @@ from __future__ import annotations
import asyncio
import logging
from typing import Any
from aiohttp.client_exceptions import ClientError
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa.payments_api import PaymentsApiError, SubscriptionInfo
from hass_nabucasa import (
Cloud,
MigratePaypalAgreementInfo,
PaymentsApiError,
SubscriptionInfo,
)
from .client import CloudClient
from .const import REQUEST_TIMEOUT
@@ -29,17 +31,17 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo
async def async_migrate_paypal_agreement(
cloud: Cloud[CloudClient],
) -> dict[str, Any] | None:
) -> MigratePaypalAgreementInfo | None:
"""Migrate a paypal agreement from legacy."""
try:
async with asyncio.timeout(REQUEST_TIMEOUT):
return await cloud_api.async_migrate_paypal_agreement(cloud)
return await cloud.payments.migrate_paypal_agreement()
except TimeoutError:
_LOGGER.error(
"A timeout of %s was reached while trying to start agreement migration",
REQUEST_TIMEOUT,
)
except ClientError as exception:
except PaymentsApiError as exception:
_LOGGER.error("Failed to start agreement migration - %s", exception)
return None

View File

@@ -7,22 +7,18 @@ import logging
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.util import Throttle
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_AVALIABLE,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_HOLD,
API_ACCOUNT_ID,
API_ACCOUNT_NAME,
@@ -31,12 +27,9 @@ from .const import (
API_DATA,
API_RATES_CURRENCY,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
API_V3_ACCOUNT_ID,
API_V3_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_RATES,
)
_LOGGER = logging.getLogger(__name__)
@@ -51,9 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
"""Set up Coinbase from a config entry."""
instance = await hass.async_add_executor_job(create_and_update_instance, entry)
entry.async_on_unload(entry.add_update_listener(update_listener))
entry.runtime_data = instance
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -68,68 +58,28 @@ async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData:
"""Create and update a Coinbase Data instance."""
# Check if user is using deprecated v2 API credentials
if "organizations" not in entry.data[CONF_API_KEY]:
client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
version = "v2"
else:
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
# Trigger reauthentication to ask user for v3 credentials
raise ConfigEntryAuthFailed(
"Your Coinbase API key appears to be for the deprecated v2 API. "
"Please reconfigure with a new API key created for the v3 API. "
"Visit https://www.coinbase.com/developer-platform to create new credentials."
)
version = "v3"
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
instance = CoinbaseData(client, base_rate, version)
instance = CoinbaseData(client, base_rate)
instance.update()
return instance
async def update_listener(
hass: HomeAssistant, config_entry: CoinbaseConfigEntry
) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
registry = er.async_get(hass)
entities = er.async_entries_for_config_entry(registry, config_entry.entry_id)
# Remove orphaned entities
for entity in entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
def get_accounts(client, version):
def get_accounts(client):
"""Handle paginated accounts."""
response = client.get_accounts()
if version == "v2":
accounts = response[API_DATA]
next_starting_after = response.pagination.next_starting_after
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return [
{
API_ACCOUNT_ID: account[API_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
}
for account in accounts
]
accounts = response[API_ACCOUNTS]
while response["has_next"]:
response = client.get_accounts(cursor=response["cursor"])
@@ -153,37 +103,28 @@ def get_accounts(client, version):
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, client, exchange_base, version):
def __init__(self, client, exchange_base):
"""Init the coinbase data object."""
self.client = client
self.accounts = None
self.exchange_base = exchange_base
self.exchange_rates = None
if version == "v2":
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
else:
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
self.api_version = version
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = get_accounts(self.client, self.api_version)
if self.api_version == "v2":
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
else:
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except (AuthenticationError, HTTPError) as coinbase_error:
self.accounts = get_accounts(self.client)
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except HTTPError as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)
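With the v2 branch removed, get_accounts only has to follow the v3 pagination cursor. A minimal, self-contained sketch of that loop, mirroring the hunk above (the "accounts"/"has_next"/"cursor" field names stand in for the API_ACCOUNTS constant and are assumed from the diff, not verified against the library):

from coinbase.rest import RESTClient


def fetch_all_accounts(client: RESTClient) -> list[dict]:
    """Follow the pagination cursor until the API reports no further pages."""
    response = client.get_accounts()
    accounts = response["accounts"]
    while response["has_next"]:
        # Each page carries a cursor pointing at the next one.
        response = client.get_accounts(cursor=response["cursor"])
        accounts += response["accounts"]
    return accounts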

View File

@@ -2,17 +2,20 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
@@ -45,9 +48,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
def get_user_from_client(api_key, api_token):
"""Get the user name from Coinbase API credentials."""
if "organizations" not in api_key:
client = LegacyClient(api_key, api_token)
return client.get_current_user()["name"]
client = RESTClient(api_key=api_key, api_secret=api_token)
return client.get_portfolios()["portfolios"][0]["name"]
@@ -59,7 +59,7 @@ async def validate_api(hass: HomeAssistant, data):
user = await hass.async_add_executor_job(
get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
)
except (AuthenticationError, HTTPError) as error:
except HTTPError as error:
if "api key" in str(error) or " 401 Client Error" in str(error):
_LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
raise InvalidKey from error
@@ -74,8 +74,8 @@ async def validate_api(hass: HomeAssistant, data):
raise InvalidAuth from error
except ConnectionError as error:
raise CannotConnect from error
api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
return {"title": user, "api_version": api_version}
return {"title": user}
async def validate_options(
@@ -85,20 +85,17 @@ async def validate_options(
client = config_entry.runtime_data.client
accounts = await hass.async_add_executor_job(
get_accounts, client, config_entry.data.get("api_version", "v2")
)
accounts = await hass.async_add_executor_job(get_accounts, client)
accounts_currencies = [
account[API_ACCOUNT_CURRENCY]
for account in accounts
if not account[ACCOUNT_IS_VAULT]
]
if config_entry.data.get("api_version", "v2") == "v2":
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
else:
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if CONF_CURRENCIES in options:
for currency in options[CONF_CURRENCIES]:
if currency not in accounts_currencies:
@@ -117,6 +114,8 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
reauth_entry: CoinbaseConfigEntry
async def async_step_user(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
@@ -143,12 +142,63 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_API_VERSION] = info["api_version"]
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication flow."""
self.reauth_entry = self._get_reauth_entry()
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Handle reauthentication confirmation."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
try:
await validate_api(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidKey:
errors["base"] = "invalid_auth_key"
except InvalidSecret:
errors["base"] = "invalid_auth_secret"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
self.reauth_entry,
data_updates=user_input,
reason="reauth_successful",
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
@staticmethod
@callback
def async_get_options_flow(
@@ -158,7 +208,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
return OptionsFlowHandler()
class OptionsFlowHandler(OptionsFlow):
class OptionsFlowHandler(OptionsFlowWithReload):
"""Handle a option flow for Coinbase."""
async def async_step_init(

View File
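Tying the two Coinbase changes together: setup now rejects legacy credentials, which drops the user into the new reauth flow, and async_update_reload_and_abort finishes it by persisting the v3 key and reloading the entry. A condensed, illustrative sketch of that handoff (names taken from the hunks above; the helper itself is hypothetical):

from homeassistant.exceptions import ConfigEntryAuthFailed


def ensure_v3_credentials(api_key: str) -> None:
    """Reject deprecated v2 keys so Home Assistant starts the reauth flow."""
    if "organizations" not in api_key:
        # Raising during setup marks the entry as needing reauthentication;
        # Home Assistant then invokes async_step_reauth, which defers to
        # async_step_reauth_confirm and finally calls
        # async_update_reload_and_abort with the new credentials.
        raise ConfigEntryAuthFailed(
            "Coinbase v2 API keys are deprecated; re-enter v3 credentials"
        )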

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
"requirements": ["coinbase-advanced-py==1.2.2"]
}

View File

@@ -6,6 +6,7 @@ import logging
from homeassistant.components.sensor import SensorEntity, SensorStateClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -27,7 +28,6 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
ATTR_NATIVE_BALANCE = "Balance in native currency"
ATTR_API_VERSION = "API Version"
CURRENCY_ICONS = {
"BTC": "mdi:currency-btc",
@@ -69,11 +69,26 @@ async def async_setup_entry(
CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT
)
# Remove orphaned entities
registry = er.async_get(hass)
existing_entities = er.async_entries_for_config_entry(
registry, config_entry.entry_id
)
for entity in existing_entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
for currency in desired_currencies:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
"Attempting to set up %s account sensor",
currency,
instance.api_version,
)
if currency not in provided_currencies:
_LOGGER.warning(
@@ -89,9 +104,8 @@ async def async_setup_entry(
if CONF_EXCHANGE_RATES in config_entry.options:
for rate in config_entry.options[CONF_EXCHANGE_RATES]:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
"Attempting to set up %s exchange rate sensor",
rate,
instance.api_version,
)
entities.append(
ExchangeRateSensor(
@@ -146,15 +160,13 @@ class AccountSensor(SensorEntity):
"""Return the state attributes of the sensor."""
return {
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
ATTR_API_VERSION: self._coinbase_data.api_version,
}
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s account sensor with %s API",
"Updating %s account sensor",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
for account in self._coinbase_data.accounts:
@@ -210,9 +222,8 @@ class ExchangeRateSensor(SensorEntity):
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s rate sensor with %s API",
"Updating %s rate sensor",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
self._attr_native_value = round(

View File
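The orphan cleanup moved into async_setup_entry above keys off the sensor unique IDs. A small sketch of the filter it applies (the "...-wallet-<currency>" / "...-xe-<rate>" shape is inferred from the substring and split("-") checks in the hunk; the helper is illustrative):

def is_orphaned(unique_id: str, currencies: list[str], rates: list[str]) -> bool:
    """Return True when a registry entry no longer matches the options."""
    currency = unique_id.split("-")[-1]
    if "xe" in unique_id:
        # Exchange-rate sensor whose rate is no longer selected.
        return currency not in rates
    if "wallet" in unique_id:
        # Wallet balance sensor whose currency was removed from the options.
        return currency not in currencies
    return False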

@@ -8,6 +8,14 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
},
"reauth_confirm": {
"title": "Update Coinbase API credentials",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
}
},
"error": {
@@ -18,7 +26,8 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "Successfully updated credentials"
}
},
"options": {

View File

@@ -146,8 +146,9 @@ def _prepare_config_flow_result_json(
return prepare_result_json(result)
data = result.copy()
entry: config_entries.ConfigEntry = data["result"]
data["result"] = entry.as_json_fragment
entry: config_entries.ConfigEntry = data["result"] # type: ignore[typeddict-item]
# We overwrite the ConfigEntry object with its json representation.
data["result"] = entry.as_json_fragment # type: ignore[typeddict-unknown-key]
data.pop("data")
data.pop("context")
return data

View File

@@ -40,6 +40,7 @@ from .chat_log import (
ConverseError,
SystemContent,
ToolResultContent,
ToolResultContentDeltaDict,
UserContent,
async_get_chat_log,
)
@@ -79,6 +80,7 @@ __all__ = [
"ConverseError",
"SystemContent",
"ToolResultContent",
"ToolResultContentDeltaDict",
"UserContent",
"async_conversation_trace_append",
"async_converse",
@@ -117,7 +119,7 @@ CONFIG_SCHEMA = vol.Schema(
{cv.string: vol.All(cv.ensure_list, [cv.string])}
)
}
)
),
},
extra=vol.ALLOW_EXTRA,
)
@@ -268,8 +270,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config.get(DOMAIN, {}).get("intents", {})
hass, entity_component, config_intents=agent_config.get("intents", {})
)
async def handle_process(service: ServiceCall) -> ServiceResponse:

View File

@@ -9,7 +9,7 @@ from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict
from typing import Any, Literal, TypedDict, cast
import voluptuous as vol
@@ -161,7 +161,9 @@ class AssistantContent:
role: Literal["assistant"] = field(init=False, default="assistant")
agent_id: str
content: str | None = None
thinking_content: str | None = None
tool_calls: list[llm.ToolInput] | None = None
native: Any = None
@dataclass(frozen=True)
@@ -183,7 +185,18 @@ class AssistantContentDeltaDict(TypedDict, total=False):
role: Literal["assistant"]
content: str | None
thinking_content: str | None
tool_calls: list[llm.ToolInput] | None
native: Any
class ToolResultContentDeltaDict(TypedDict, total=False):
"""Tool result content."""
role: Literal["tool_result"]
tool_call_id: str
tool_name: str
tool_result: JsonObjectType
@dataclass
@@ -231,17 +244,25 @@ class ChatLog:
@callback
def async_add_assistant_content_without_tools(
self, content: AssistantContent
self, content: AssistantContent | ToolResultContent
) -> None:
"""Add assistant content to the log."""
"""Add assistant content to the log.
Allows assistant content without tool calls or with external tool calls only,
as well as tool results for the external tools.
"""
LOGGER.debug("Adding assistant content: %s", content)
if content.tool_calls is not None:
raise ValueError("Tool calls not allowed")
if (
isinstance(content, AssistantContent)
and content.tool_calls is not None
and any(not tool_call.external for tool_call in content.tool_calls)
):
raise ValueError("Non-external tool calls not allowed")
self.content.append(content)
async def async_add_assistant_content(
self,
content: AssistantContent,
content: AssistantContent | ToolResultContent,
/,
tool_call_tasks: dict[str, asyncio.Task] | None = None,
) -> AsyncGenerator[ToolResultContent]:
@@ -254,7 +275,11 @@ class ChatLog:
LOGGER.debug("Adding assistant content: %s", content)
self.content.append(content)
if content.tool_calls is None:
if (
not isinstance(content, AssistantContent)
or content.tool_calls is None
or all(tool_call.external for tool_call in content.tool_calls)
):
return
if self.llm_api is None:
@@ -263,13 +288,16 @@ class ChatLog:
if tool_call_tasks is None:
tool_call_tasks = {}
for tool_input in content.tool_calls:
if tool_input.id not in tool_call_tasks:
if tool_input.id not in tool_call_tasks and not tool_input.external:
tool_call_tasks[tool_input.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_input),
name=f"llm_tool_{tool_input.id}",
)
for tool_input in content.tool_calls:
if tool_input.external:
continue
LOGGER.debug(
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
)
@@ -292,7 +320,9 @@ class ChatLog:
yield response_content
async def async_add_delta_content_stream(
self, agent_id: str, stream: AsyncIterable[AssistantContentDeltaDict]
self,
agent_id: str,
stream: AsyncIterable[AssistantContentDeltaDict | ToolResultContentDeltaDict],
) -> AsyncGenerator[AssistantContent | ToolResultContent]:
"""Stream content into the chat log.
@@ -306,6 +336,8 @@ class ChatLog:
The keys content and tool_calls will be concatenated if they appear multiple times.
"""
current_content = ""
current_thinking_content = ""
current_native: Any = None
current_tool_calls: list[llm.ToolInput] = []
tool_call_tasks: dict[str, asyncio.Task] = {}
@@ -314,34 +346,54 @@ class ChatLog:
# Indicates update to current message
if "role" not in delta:
if delta_content := delta.get("content"):
# ToolResultContentDeltaDict will always have a role
assistant_delta = cast(AssistantContentDeltaDict, delta)
if delta_content := assistant_delta.get("content"):
current_content += delta_content
if delta_tool_calls := delta.get("tool_calls"):
if self.llm_api is None:
raise ValueError("No LLM API configured")
if delta_thinking_content := assistant_delta.get("thinking_content"):
current_thinking_content += delta_thinking_content
if delta_native := assistant_delta.get("native"):
if current_native is not None:
raise RuntimeError(
"Native content already set, cannot overwrite"
)
current_native = delta_native
if delta_tool_calls := assistant_delta.get("tool_calls"):
current_tool_calls += delta_tool_calls
# Start processing the tool calls as soon as we know about them
for tool_call in delta_tool_calls:
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
if not tool_call.external:
if self.llm_api is None:
raise ValueError("No LLM API configured")
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
if self.delta_listener:
self.delta_listener(self, delta) # type: ignore[arg-type]
if filtered_delta := {
k: v for k, v in assistant_delta.items() if k != "native"
}:
# We do not want to send the native content to the listener
# as it is not JSON serializable
self.delta_listener(self, filtered_delta)
continue
# Starting a new message
if delta["role"] != "assistant":
raise ValueError(f"Only assistant role expected. Got {delta['role']}")
# Yield the previous message if it has content
if current_content or current_tool_calls:
content = AssistantContent(
if (
current_content
or current_thinking_content
or current_tool_calls
or current_native
):
content: AssistantContent | ToolResultContent = AssistantContent(
agent_id=agent_id,
content=current_content or None,
thinking_content=current_thinking_content or None,
tool_calls=current_tool_calls or None,
native=current_native,
)
yield content
async for tool_result in self.async_add_assistant_content(
@@ -350,18 +402,51 @@ class ChatLog:
yield tool_result
if self.delta_listener:
self.delta_listener(self, asdict(tool_result))
current_content = ""
current_thinking_content = ""
current_native = None
current_tool_calls = []
current_content = delta.get("content") or ""
current_tool_calls = delta.get("tool_calls") or []
if delta["role"] == "assistant":
current_content = delta.get("content") or ""
current_thinking_content = delta.get("thinking_content") or ""
current_tool_calls = delta.get("tool_calls") or []
current_native = delta.get("native")
if self.delta_listener:
self.delta_listener(self, delta) # type: ignore[arg-type]
if self.delta_listener:
if filtered_delta := {
k: v for k, v in delta.items() if k != "native"
}:
self.delta_listener(self, filtered_delta)
elif delta["role"] == "tool_result":
content = ToolResultContent(
agent_id=agent_id,
tool_call_id=delta["tool_call_id"],
tool_name=delta["tool_name"],
tool_result=delta["tool_result"],
)
yield content
if self.delta_listener:
self.delta_listener(self, asdict(content))
self.async_add_assistant_content_without_tools(content)
else:
raise ValueError(
"Only assistant and tool_result roles expected."
f" Got {delta['role']}"
)
if current_content or current_tool_calls:
if (
current_content
or current_thinking_content
or current_tool_calls
or current_native
):
content = AssistantContent(
agent_id=agent_id,
content=current_content or None,
thinking_content=current_thinking_content or None,
tool_calls=current_tool_calls or None,
native=current_native,
)
yield content
async for tool_result in self.async_add_assistant_content(

View File
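The chat-log changes above boil down to two new delta shapes: assistant deltas may now carry thinking_content, native data and external tool calls, and a separate tool_result role reports the outcome of a tool the integration ran itself. An illustrative stream in the shape async_add_delta_content_stream accepts (field values are made up; the ToolInput constructor and its external flag are assumptions based on the hunks above):

from homeassistant.helpers import llm


async def example_stream():
    """Yield deltas in the shape async_add_delta_content_stream accepts."""
    # Assistant turn: reasoning text, visible text, then one external tool call.
    yield {"role": "assistant", "thinking_content": "Checking the calendar..."}
    yield {"content": "I'll look that up."}
    yield {
        "tool_calls": [
            llm.ToolInput(
                id="call-1",
                tool_name="calendar_lookup",  # hypothetical tool
                tool_args={"day": "tomorrow"},
                external=True,  # the chat log will not execute this call itself
            )
        ]
    }
    # The integration runs the external tool and reports its result as a new turn.
    yield {
        "role": "tool_result",
        "tool_call_id": "call-1",
        "tool_name": "calendar_lookup",
        "tool_result": {"events": 2},
    }

Fed into chat_log.async_add_delta_content_stream(agent_id, example_stream()), the external call is appended without being dispatched to the LLM API, while the tool_result delta is stored via async_add_assistant_content_without_tools, matching the branches shown above.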

@@ -14,14 +14,19 @@ import re
import time
from typing import IO, Any, cast
from hassil.expression import Expression, ListReference, Sequence, TextChunk
from hassil.expression import Expression, Group, ListReference, TextChunk
from hassil.fuzzy import FuzzyNgramMatcher, SlotCombinationInfo
from hassil.intents import (
Intent,
IntentData,
Intents,
SlotList,
TextSlotList,
TextSlotValue,
WildcardSlotList,
)
from hassil.models import MatchEntity
from hassil.ngram import Sqlite3NgramModel
from hassil.recognize import (
MISSING_ENTITY,
RecognizeResult,
@@ -31,7 +36,15 @@ from hassil.recognize import (
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.trie import Trie
from hassil.util import merge_dict
from home_assistant_intents import ErrorKey, get_intents, get_languages
from home_assistant_intents import (
ErrorKey,
FuzzyConfig,
FuzzyLanguageResponses,
get_fuzzy_config,
get_fuzzy_language,
get_intents,
get_languages,
)
import yaml
from homeassistant import core
@@ -76,6 +89,7 @@ TRIGGER_CALLBACK_TYPE = Callable[
]
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -94,6 +108,8 @@ class LanguageIntents:
intent_responses: dict[str, Any]
error_responses: dict[str, Any]
language_variant: str | None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
@dataclass(slots=True)
@@ -119,10 +135,13 @@ class IntentMatchingStage(Enum):
EXPOSED_ENTITIES_ONLY = auto()
"""Match against exposed entities only."""
FUZZY = auto()
"""Use fuzzy matching to guess intent."""
UNEXPOSED_ENTITIES = auto()
"""Match against unexposed entities in Home Assistant."""
FUZZY = auto()
UNKNOWN_NAMES = auto()
"""Capture names that are not known to Home Assistant."""
@@ -241,6 +260,10 @@ class DefaultAgent(ConversationEntity):
# LRU cache to avoid unnecessary intent matching
self._intent_cache = IntentCache(capacity=128)
# Shared configuration for fuzzy matching
self.fuzzy_matching = True
self._fuzzy_config: FuzzyConfig | None = None
@property
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""
@@ -299,7 +322,7 @@ class DefaultAgent(ConversationEntity):
_LOGGER.warning("No intents were loaded for language: %s", language)
return None
slot_lists = self._make_slot_lists()
slot_lists = await self._make_slot_lists()
intent_context = self._make_intent_context(user_input)
if self._exposed_names_trie is not None:
@@ -556,6 +579,36 @@ class DefaultAgent(ConversationEntity):
# Don't try matching against all entities or doing a fuzzy match
return None
# Use fuzzy matching
skip_fuzzy_match = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.FUZZY
):
_LOGGER.debug("Got cached result for fuzzy match")
return cache_value.result
# Continue with matching, but we know we won't succeed for fuzzy
# match.
skip_fuzzy_match = True
if (not skip_fuzzy_match) and self.fuzzy_matching:
start_time = time.monotonic()
fuzzy_result = self._recognize_fuzzy(lang_intents, user_input)
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(result=fuzzy_result, stage=IntentMatchingStage.FUZZY),
)
_LOGGER.debug(
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
)
if fuzzy_result is not None:
return fuzzy_result
# Try again with all entities (including unexposed)
skip_unexposed_entities_match = False
if cache_value is not None:
@@ -601,102 +654,160 @@ class DefaultAgent(ConversationEntity):
# This should fail the intent handling phase (async_match_targets).
return strict_result
# Try again with missing entities enabled
skip_fuzzy_match = False
# Check unknown names
skip_unknown_names = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.FUZZY
cache_value.stage == IntentMatchingStage.UNKNOWN_NAMES
):
_LOGGER.debug("Got cached result for fuzzy match")
_LOGGER.debug("Got cached result for unknown names")
return cache_value.result
# We know we won't succeed for fuzzy matching.
skip_fuzzy_match = True
skip_unknown_names = True
maybe_result: RecognizeResult | None = None
if not skip_fuzzy_match:
if not skip_unknown_names:
start_time = time.monotonic()
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
maybe_result = self._recognize_unknown_names(
lang_intents, user_input, slot_lists, intent_context
)
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY),
IntentCacheValue(
result=maybe_result, stage=IntentMatchingStage.UNKNOWN_NAMES
),
)
_LOGGER.debug(
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
"Did unknown names match in %s second(s)", time.monotonic() - start_time
)
return maybe_result
def _recognize_fuzzy(
self, lang_intents: LanguageIntents, user_input: ConversationInput
) -> RecognizeResult | None:
"""Return fuzzy recognition from hassil."""
if lang_intents.fuzzy_matcher is None:
return None
fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
if fuzzy_result is None:
return None
response = "default"
if lang_intents.fuzzy_responses:
domain = "" # no domain
if "name" in fuzzy_result.slots:
domain = fuzzy_result.name_domain
elif "domain" in fuzzy_result.slots:
domain = fuzzy_result.slots["domain"].value
slot_combo = tuple(sorted(fuzzy_result.slots))
if (
intent_responses := lang_intents.fuzzy_responses.get(
fuzzy_result.intent_name
)
) and (combo_responses := intent_responses.get(slot_combo)):
response = combo_responses.get(domain, response)
entities = [
MatchEntity(name=slot_name, value=slot_value.value, text=slot_value.text)
for slot_name, slot_value in fuzzy_result.slots.items()
]
return RecognizeResult(
intent=Intent(name=fuzzy_result.intent_name),
intent_data=IntentData(sentence_texts=[]),
intent_metadata={METADATA_FUZZY_MATCH: True},
entities={entity.name: entity for entity in entities},
entities_list=entities,
response=response,
)
def _recognize_unknown_names(
self,
lang_intents: LanguageIntents,
user_input: ConversationInput,
slot_lists: dict[str, SlotList],
intent_context: dict[str, Any] | None,
) -> RecognizeResult | None:
"""Return result with unknown names for an error message."""
maybe_result: RecognizeResult | None = None
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
return maybe_result
def _get_unexposed_entity_names(self, text: str) -> TextSlotList:
"""Get filtered slot list with unexposed entity names in Home Assistant."""
if self._unexposed_names_trie is None:
@@ -851,7 +962,7 @@ class DefaultAgent(ConversationEntity):
if lang_intents is None:
return
self._make_slot_lists()
await self._make_slot_lists()
async def async_get_or_load_intents(self, language: str) -> LanguageIntents | None:
"""Load all intents of a language with lock."""
@@ -1002,12 +1113,85 @@ class DefaultAgent(ConversationEntity):
intent_responses = responses_dict.get("intents", {})
error_responses = responses_dict.get("errors", {})
if not self.fuzzy_matching:
_LOGGER.debug("Fuzzy matching is disabled")
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
# Load fuzzy
fuzzy_info = get_fuzzy_language(language_variant, json_load=json_load)
if fuzzy_info is None:
_LOGGER.debug(
"Fuzzy matching not available for language: %s", language_variant
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
if self._fuzzy_config is None:
# Load shared config
self._fuzzy_config = get_fuzzy_config(json_load=json_load)
_LOGGER.debug("Loaded shared fuzzy matching config")
assert self._fuzzy_config is not None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
start_time = time.monotonic()
fuzzy_responses = fuzzy_info.responses
fuzzy_matcher = FuzzyNgramMatcher(
intents=intents,
intent_models={
intent_name: Sqlite3NgramModel(
order=fuzzy_model.order,
words={
word: str(word_id)
for word, word_id in fuzzy_model.words.items()
},
database_path=fuzzy_model.database_path,
)
for intent_name, fuzzy_model in fuzzy_info.ngram_models.items()
},
intent_slot_list_names=self._fuzzy_config.slot_list_names,
slot_combinations={
intent_name: {
combo_key: [
SlotCombinationInfo(
name_domains=(set(name_domains) if name_domains else None)
)
]
for combo_key, name_domains in intent_combos.items()
}
for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
},
domain_keywords=fuzzy_info.domain_keywords,
stop_words=fuzzy_info.stop_words,
)
_LOGGER.debug(
"Loaded fuzzy matcher in %s second(s): language=%s, intents=%s",
time.monotonic() - start_time,
language_variant,
sorted(fuzzy_matcher.intent_models.keys()),
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
fuzzy_matcher=fuzzy_matcher,
fuzzy_responses=fuzzy_responses,
)
@core.callback
@@ -1027,8 +1211,7 @@ class DefaultAgent(ConversationEntity):
# Slot lists have changed, so we must clear the cache
self._intent_cache.clear()
@core.callback
def _make_slot_lists(self) -> dict[str, SlotList]:
async def _make_slot_lists(self) -> dict[str, SlotList]:
"""Create slot lists with areas and entity names/aliases."""
if self._slot_lists is not None:
return self._slot_lists
@@ -1089,6 +1272,10 @@ class DefaultAgent(ConversationEntity):
"floor": TextSlotList.from_tuples(floor_names, allow_template=False),
}
# Reload fuzzy matchers with new slot lists
if self.fuzzy_matching:
await self.hass.async_add_executor_job(self._load_fuzzy_matchers)
self._listen_clear_slot_list()
_LOGGER.debug(
@@ -1098,6 +1285,25 @@ class DefaultAgent(ConversationEntity):
return self._slot_lists
def _load_fuzzy_matchers(self) -> None:
"""Reload fuzzy matchers for all loaded languages."""
for lang_intents in self._lang_intents.values():
if (not isinstance(lang_intents, LanguageIntents)) or (
lang_intents.fuzzy_matcher is None
):
continue
lang_matcher = lang_intents.fuzzy_matcher
lang_intents.fuzzy_matcher = FuzzyNgramMatcher(
intents=lang_matcher.intents,
intent_models=lang_matcher.intent_models,
intent_slot_list_names=lang_matcher.intent_slot_list_names,
slot_combinations=lang_matcher.slot_combinations,
domain_keywords=lang_matcher.domain_keywords,
stop_words=lang_matcher.stop_words,
slot_lists=self._slot_lists,
)
def _make_intent_context(
self, user_input: ConversationInput
) -> dict[str, Any] | None:
@@ -1183,7 +1389,7 @@ class DefaultAgent(ConversationEntity):
for trigger_intent in trigger_intents.intents.values():
for intent_data in trigger_intent.data:
for sentence in intent_data.sentences:
_collect_list_references(sentence, wildcard_names)
_collect_list_references(sentence.expression, wildcard_names)
for wildcard_name in wildcard_names:
trigger_intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -1520,11 +1726,9 @@ def _get_match_error_response(
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
if isinstance(expression, Sequence):
seq: Sequence = expression
for item in seq.items:
if isinstance(expression, Group):
for item in expression.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}
list_ref: ListReference = expression
list_names.add(list_ref.slot_name)
list_names.add(expression.slot_name)
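The new FUZZY stage slots in between EXPOSED_ENTITIES_ONLY and UNEXPOSED_ENTITIES and follows the same cache discipline as the other stages: a cached result for a stage is returned immediately, while a cached miss only tells the agent to skip that stage and move on. A compressed, illustrative sketch of that gate (IntentCacheValue and IntentMatchingStage are the names used above; the helper itself is hypothetical):

def gated_stage(cache_value, stage, run_matcher):
    """Run one matching stage unless the intent cache already answered it."""
    skip = False
    if cache_value is not None:
        if cache_value.result is not None and cache_value.stage == stage:
            # Cached positive result produced at exactly this stage.
            return cache_value.result
        # Any other cached entry means this stage was already tried and failed.
        skip = True
    if not skip:
        # The caller stores the outcome back into the cache for next time.
        return run_matcher()
    return None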

View File

@@ -26,7 +26,11 @@ from .agent_manager import (
get_agent_manager,
)
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE
from .default_agent import (
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
METADATA_FUZZY_MATCH,
)
from .entity import ConversationEntity
from .models import ConversationInput
@@ -240,6 +244,8 @@ async def websocket_hass_agent_debug(
"sentence_template": "",
# When match is incomplete, this will contain the best slot guesses
"unmatched_slots": _get_unmatched_slots(intent_result),
# True if match was not exact
"fuzzy_match": False,
}
if successful_match:
@@ -251,16 +257,19 @@ async def websocket_hass_agent_debug(
if intent_result.intent_sentence is not None:
result_dict["sentence_template"] = intent_result.intent_sentence.text
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata and intent_result.intent_metadata.get(
METADATA_CUSTOM_SENTENCE
):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
if intent_result.intent_metadata:
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
METADATA_FUZZY_MATCH, False
)
else:
result_dict["source"] = "builtin"
result_dicts.append(result_dict)
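For the debug endpoint, the practical effect is one extra key per result. A builtin sentence matched through the fuzzy stage would now come back roughly as follows (only the keys visible in the hunk above; values are illustrative):

result_dict = {
    "sentence_template": "",
    "unmatched_slots": {},  # best slot guesses when the match is incomplete
    "source": "builtin",
    "fuzzy_match": True,  # METADATA_FUZZY_MATCH was set in the intent metadata
}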

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.6.23"]
"requirements": ["hassil==3.1.0", "home-assistant-intents==2025.7.30"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["cookidoo_api"],
"quality_scale": "silver",
"requirements": ["cookidoo-api==0.12.2"]
"requirements": ["cookidoo-api==0.14.0"]
}

Some files were not shown because too many files have changed in this diff.