Compare commits

..

191 Commits

Author SHA1 Message Date
Paulus Schoutsen
176f9c9f94 Add decorator to define Python tools from Python functions 2025-08-17 20:59:10 +00:00
Pete Sage
9f17a8a943 Add tests and improve error handling for Sonos update_alarm service call (#150715) 2025-08-17 22:47:45 +02:00
Joost Lekkerkerker
b44c47cd80 Removing myself as codeowner of Enphase (#150811) 2025-08-17 22:35:23 +02:00
Joost Lekkerkerker
e80c090932 Pin gql to 3.5.3 (#150800) 2025-08-17 19:27:17 +02:00
Pete Sage
ff418f513a Add dialog mode select for Sonos Arc Ultra soundbar (#150637)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-17 17:15:29 +02:00
Joost Lekkerkerker
b222cc5889 Use lifecycle hook instead of storing callback in starline (#150707) 2025-08-17 17:08:35 +02:00
Yuxin Wang
db1707fd72 Mark config-flow-test-coverage as done in APCUPSD quality scale (#150733) 2025-08-17 17:08:25 +02:00
peteS-UK
6f6f5809d0 Fix volume step error in Squeezebox media player (#150760) 2025-08-17 17:07:23 +02:00
Aidan Timson
1f43f82ea6 Update systembridgeconnector to 4.1.10 (#150736) 2025-08-17 17:03:46 +02:00
Yevhenii Vaskivskyi
942274234e Add asusrouter logger definition to asuswrt (#150747) 2025-08-17 16:59:02 +02:00
Maciej Bieniek
f03955b773 NextDNS tests improvements (#150791) 2025-08-17 16:56:25 +02:00
jan iversen
27ac375183 Remove unused strings in modbus (#150795) 2025-08-17 16:21:28 +02:00
Jamin
c951728767 VOIP RTP cleanup (#150490) 2025-08-17 16:16:20 +02:00
Paulus Schoutsen
3496494290 Remove filters from device analytics payload (#150771) 2025-08-17 16:15:02 +02:00
jan iversen
e90183391e Modbus: Delay start after connection is made. (#150526) 2025-08-17 16:09:24 +02:00
Joost Lekkerkerker
90558c517b Add info to Bravia device (#150690) 2025-08-17 15:30:46 +02:00
epenet
7fba94747e Add Tuya test fixtures (#150793) 2025-08-17 14:05:58 +02:00
Thomas Schamm
3b4b478afa Fix for bosch_shc: 'device_registry.async_get_or_create' referencing a non existing 'via_device' (#150756) 2025-08-17 10:49:04 +02:00
Joakim Plate
a3640c5664 feat: switch to model id for togrill (#150750) 2025-08-17 06:30:05 +02:00
Michael
246a181ad4 Fix restrict-task-creation workflow (#150774) 2025-08-17 01:56:56 +02:00
Thomas Schamm
d642ecb302 Bump boschshcpy to 0.2.107 (#150754) 2025-08-17 00:37:44 +02:00
Yevhenii Vaskivskyi
53889165b5 Bump asusrouter to 1.19.0 (#150742) 2025-08-16 21:32:27 +02:00
Marc Mueller
fe32e74910 Update charset-normalizer to 3.4.3 (#150770) 2025-08-16 21:31:14 +02:00
dontinelli
a71ae4db37 Add min/max values as extra attributes for measurements for fyta (#150562) 2025-08-16 20:49:55 +02:00
Marc Mueller
0d5ebdb692 Update hassfest package exceptions (#150744) 2025-08-16 12:52:26 +02:00
Denis Shulyaka
80e720f663 Add external tools support for chat log (#150461) 2025-08-16 12:20:20 +02:00
epenet
616b031df8 Use constants in Tuya tests (#150739) 2025-08-16 11:00:08 +02:00
Tom
bcdece4455 Add additional sensors to airOS (#150712) 2025-08-16 08:43:47 +02:00
Joost Lekkerkerker
1aa3efaf8a Add support for fineDustSensor capability in SmartThings (#150714) 2025-08-16 08:41:28 +02:00
Luke Lashley
7f16b11776 Improve roborock resume cleaning logic (#150726) 2025-08-16 08:40:46 +02:00
Maciej Bieniek
078b7224fc Add "bypass age verification" switch to NextDNS integration (#150716) 2025-08-15 21:46:06 +03:00
Denis Shulyaka
d5970e7733 Anthropic thinking content (#150341) 2025-08-15 15:52:36 +02:00
Joost Lekkerkerker
d5a74892e6 Remove unnecessary hass assignment in coordinators (#150696)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-15 15:52:13 +02:00
Joost Lekkerkerker
793a829236 Add serial number to Vodafone Station device (#150709) 2025-08-15 15:52:01 +02:00
G Johansson
7670146faf Improve handling decode errors in rest (#150699) 2025-08-15 15:51:48 +02:00
Nick Kuiper
eaedefe105 Update bluecurrent-api to 1.3.1 (#150559) 2025-08-15 15:45:40 +02:00
Marc Mueller
4f20776e0e Add check for dependency package names in hassfest (#150630) 2025-08-15 15:44:47 +02:00
epenet
6c21a14be4 Add binary sensor to 1-Wire DS2405 (#150679) 2025-08-15 15:37:34 +02:00
Alex Thompson
9015743483 Bump tilt-ble to 0.3.1 (#150711) 2025-08-15 15:37:08 +02:00
Thomas D
2a62e033dd Add binary sensor platform to qbus integration (#149975) 2025-08-15 15:35:51 +02:00
Joost Lekkerkerker
f72f2a326a Add MAC address to Modern forms devices (#150698) 2025-08-15 15:34:31 +02:00
Joost Lekkerkerker
61de50dfc0 Add hw_version to Point device (#150704) 2025-08-15 15:34:10 +02:00
Joost Lekkerkerker
ef7ed026db Add serial number to Ondilo ICO (#150702) 2025-08-15 15:33:13 +02:00
Joost Lekkerkerker
abdb48e7ce Add serial number to Nobo hub devices (#150700) 2025-08-15 15:32:43 +02:00
Joost Lekkerkerker
9646aa232a Add serial number to Zeversolar device (#150710) 2025-08-15 15:31:29 +02:00
Joost Lekkerkerker
635cfe7d17 Remove hass assignment in Openhome (#150703) 2025-08-15 15:30:01 +02:00
Joost Lekkerkerker
1e2f7cadc7 Add unregister hook to Vera (#150708) 2025-08-15 15:27:49 +02:00
Tom
94e9f32da5 Bump airOS to 0.3.0 (#150693) 2025-08-15 15:24:23 +02:00
Maciej Bieniek
b7ba99ed17 Bump nextdns to version 4.1.0 (#150706) 2025-08-15 15:24:05 +02:00
Joost Lekkerkerker
ebbeef8021 Add mac to Ambient station device (#150689) 2025-08-15 15:15:22 +02:00
Joost Lekkerkerker
8da75490c0 Add hw_version to RainMachine device (#150705) 2025-08-15 15:04:59 +02:00
Joost Lekkerkerker
bc89e8fd3c Move Notion hardware revision to hw_version (#150701) 2025-08-15 15:03:30 +02:00
Joost Lekkerkerker
602497904b Set firmware version to the right field in Guardian (#150697) 2025-08-15 15:01:42 +02:00
G Johansson
facf217b99 Fix missing labels for subdiv in workday (#150684) 2025-08-15 13:59:35 +02:00
Joost Lekkerkerker
b300654e15 Add serial number to Dremel device (#150691) 2025-08-15 13:58:44 +02:00
Joost Lekkerkerker
a742125f13 Add serial number to Emonitor device (#150692) 2025-08-15 13:58:23 +02:00
Thomas D
64768b1036 Fix re-auth flow for Volvo integration (#150478) 2025-08-15 13:58:03 +02:00
Petro31
792bb5781d Fix optimistic set to false for template entities (#150421) 2025-08-15 13:53:48 +02:00
Jan Bouwhuis
7bd126dc8e Assert the MQTT config entry is reloaded on subentry creation and mutation (#150636) 2025-08-15 13:04:12 +02:00
Joakim Sørensen
83ee380b17 Bump hass-nabucasa from 0.111.2 to 1.0.0 and refactor related code (#150566) 2025-08-15 11:35:52 +02:00
Ludovic BOUÉ
58f8b3c401 Bump Python Matter server to 8.1.0 (#150631) 2025-08-15 11:29:49 +02:00
Marc Mueller
2a6d1180f4 Update py-madvr2 to 1.6.40 (#150647) 2025-08-15 08:13:22 +02:00
J. Nick Koston
00b765893d Bump onvif-zeep-async to 4.0.3 (#150663) 2025-08-15 05:49:31 +02:00
karwosts
3e9e9b0489 Fix demo media_player.browse browsing (#150669) 2025-08-15 05:47:55 +02:00
Luke Lashley
25f7c02498 Bump python-snoo to 0.8.3 (#150670) 2025-08-15 05:46:59 +02:00
Manu
a785f3d509 Increase test coverage of Habitica (#150671) 2025-08-15 05:45:42 +02:00
J. Nick Koston
9f36b2dcde Bump protobuf to 6.32.0 (#150667) 2025-08-15 02:31:10 +02:00
Michael Hansen
57265ac648 Add fuzzy matching to default agent (#150595) 2025-08-14 16:28:42 -05:00
J. Nick Koston
f5fe53a67f Bump uiprotect to 7.21.1 (#150657) 2025-08-14 16:16:04 -05:00
Arie Catsman
7e6ceee9d1 Add IQ Meter Collar and C6 Combiner to enphase_envoy integration (#150649) 2025-08-14 15:34:37 -05:00
DeerMaximum
9c21965a34 Add diagnostics to NINA (#150638) 2025-08-14 19:57:33 +02:00
rwrozelle
1ea740d81c Add media_player add off on capability to esphome (#147990) 2025-08-14 12:07:01 -05:00
rwrozelle
6e98446523 Media player API enumeration alignment and feature flags (#149597)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-08-14 11:24:43 -05:00
Ludovic BOUÉ
2248584a0f Add Matter Electrical measurements additional attributes (#150188)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-14 17:07:18 +02:00
Martin Hjelmare
d9b6f82639 Add Z-Wave Fortrezz SSA2 discovery (#150629) 2025-08-14 17:37:44 +03:00
DeerMaximum
3eecfa8e57 Set PARALLEL_UPDATES in NINA (#150635) 2025-08-14 16:36:04 +02:00
epenet
382e7dfd39 Add Tuya test fixtures (#150622) 2025-08-14 14:51:43 +02:00
Joost Lekkerkerker
5358c89bfd Add fixtures for one door refrigerator in SmartThings (#150632) 2025-08-14 14:51:20 +02:00
Tom
e6103fdcf4 Bump airOS to 0.2.11 (#150627) 2025-08-14 13:43:32 +02:00
Martin Dybal
02dca5f0ad Fix type annotation for climate _attr_current_humidity (#150615) 2025-08-14 12:55:54 +02:00
Ludovic BOUÉ
cc4b9e0eca Extend UnitOfReactivePower with 'mvar' (#150415) 2025-08-14 11:46:06 +02:00
Joost Lekkerkerker
7e28e3dcd3 Add sw_version to JustNimbus device (#150592) 2025-08-14 09:31:43 +02:00
Joakim Plate
bb3d571887 Make sure we update the api version in philips_js discovery (#150604) 2025-08-14 09:30:47 +02:00
Joakim Plate
5a789cbbc8 Bump togrill to 0.7.0 in preperation for number (#150611) 2025-08-14 09:30:02 +02:00
dependabot[bot]
4954c2a84b Bump actions/ai-inference from 1.2.8 to 2.0.0 (#150619)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-14 09:27:18 +02:00
G Johansson
f28e9f60ee Use runtime_data in pvpc_hourly_pricing (#150565) 2025-08-14 01:03:04 +02:00
Luke Lashley
6a4bf4ec72 Bump python-snoo to 0.8.2 (#150569) 2025-08-14 00:12:18 +02:00
Luke Lashley
12706178c2 Change Snoo to use MQTT instead of PubNub (#150570) 2025-08-14 00:11:52 +02:00
Samuel Xiao
ed39b18d94 Add cover platform for switchbot cloud (#148993) 2025-08-14 00:10:19 +02:00
G Johansson
9999807891 Use OptionsFlowWithReload in coinbase (#150587) 2025-08-13 23:48:20 +02:00
Arie Catsman
b5db0e98b4 Bump pyenphase to 2.3.0 (#150600) 2025-08-13 23:44:07 +02:00
Åke Strandberg
f58b2177a2 Bump pymiele to 0.5.4 (#150605) 2025-08-13 23:42:47 +02:00
G Johansson
4f64014816 Add wind gust sensor to OpenWeatherMap (#150607) 2025-08-13 23:34:12 +02:00
Michael Hansen
cf68214c4d Bump hassil to 3.1.0 (#150584) 2025-08-13 20:58:57 +02:00
Marc Mueller
b3d3284f5c Update types packages (#150586) 2025-08-13 20:55:22 +02:00
Marc Mueller
12c346f550 Update orjson to 3.11.2 (#150588) 2025-08-13 20:53:55 +02:00
HarvsG
bda82e19a5 Pi_hole - Account for auth succeeding when it shouldn't (#150413) 2025-08-13 20:53:21 +02:00
Marc Mueller
f7726a7563 Update pre-commit-hooks to 6.0.0 (#150583) 2025-08-13 19:23:26 +02:00
Michael Hansen
2c0ed2cbfe Add intent for setting fan speed (#150576) 2025-08-13 18:57:25 +02:00
Marc Mueller
13376ef896 Fix RuntimeWarning in asuswrt tests (#150580) 2025-08-13 18:33:02 +02:00
Marc Mueller
d18cc3d6c3 Fix RuntimeWarning in squeezebox tests (#150582) 2025-08-13 18:32:50 +02:00
karwosts
b40aab479a Change monetary translation to 'Monetary balance' (#150054) 2025-08-13 17:21:36 +02:00
Michael Hansen
721f9a40d8 Add volume up/down intents for media players (#150443) 2025-08-13 09:35:37 -05:00
Ludovic BOUÉ
eb4b75a9a7 Extend UnitOfApparentPower with 'mVA' (#150422)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-13 15:56:04 +02:00
epenet
b40f381164 Add Tuya test fixture (#150557) 2025-08-13 14:09:19 +02:00
epenet
51413b7a8d Ensure Tuya fans have at least one valid DPCode (#150550) 2025-08-13 13:40:11 +02:00
Foscam-wangzhengyu
ff694a0058 Foscam Add prompt language and modify the default port to a more compatible (#150536) 2025-08-13 13:21:39 +02:00
Joakim Sørensen
eea04558a9 Move alexa access token updates to new handler (#150466)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-08-13 13:21:28 +02:00
starkillerOG
5ad2a27918 Use camera name in Reolink tests (#150555) 2025-08-13 13:06:12 +02:00
G Johansson
f39305f64e Remove deprecated json helper constants and function (#150111) 2025-08-13 12:42:00 +02:00
karwosts
7fba0ca2c0 Add 'all' option to light/switch group config flow (#149671) 2025-08-13 12:34:58 +02:00
Pete Sage
51fbccd125 Fix Sonos CI issue part 2 (#150529)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-08-13 12:26:24 +02:00
G Johansson
5fc2e6ed53 Add async_update_reload_and_abort to config entry subentries (#149768)
Co-authored-by: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com>
2025-08-13 11:59:37 +02:00
Christopher Fenner
5a7f7d90a0 move Volvo car connection status sensor to diagnostic section (#150487) 2025-08-13 11:45:05 +02:00
Luke Lashley
6d34d34ce1 Bump python-snoo to 0.8.1 (#150530) 2025-08-13 11:38:18 +02:00
dependabot[bot]
6454f40c3c Bump github/codeql-action from 3.29.8 to 3.29.9 (#150539)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-13 11:36:43 +02:00
epenet
53e40a6b8c Ensure Tuya humidifiers have at least one valid DPCode (#150546) 2025-08-13 11:25:59 +02:00
J. Nick Koston
8a54a1d95c Bump aioesphomeapi to 39.0.0 (#150523) 2025-08-13 03:17:20 -05:00
Yevhenii Vaskivskyi
8a52e9ca01 Bump asusrouter to 1.18.2 (#150541) 2025-08-13 10:46:08 +03:00
Robert Resch
d9ca253c6c Bump uv to 0.8.9 (#150542) 2025-08-13 09:45:54 +02:00
Pete Sage
b7853ea9bd Fix Sonos CI Issue (#150518)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 23:18:59 +02:00
J. Nick Koston
d19e410ea8 Bump aiodhcpwatcher to 1.2.1 (#150519) 2025-08-12 22:07:25 +02:00
Shay Levy
83f911e4ff Bump aiowebostv to 0.7.5 (#150514) 2025-08-12 22:53:56 +03:00
jan iversen
452322e971 Modbus: Do not remove non-duplicate error log. (#150511) 2025-08-12 21:16:43 +02:00
Manu
6fa7c6cb81 Add party to Habitica (#149608) 2025-08-12 20:51:12 +02:00
J. Nick Koston
ed6072d46b Bump bleak-retry-connector to 4.0.1 (#150515) 2025-08-12 20:49:43 +02:00
Yevhenii Vaskivskyi
9fdc632780 Switch asuswrt http(s) library to asusrouter package (#150426) 2025-08-12 20:45:39 +02:00
Norbert Rittel
4d426c31f9 Fix missing sentence-case in hydrawise (#150513) 2025-08-12 20:10:43 +02:00
jan iversen
ea946c90b3 Modbus: Cancel connect background task if stopping/restarting. (#150507) 2025-08-12 19:38:17 +02:00
Tom
fb68b2d454 Bump airOS to 0.2.8 (#150504) 2025-08-12 19:27:27 +02:00
Ludovic BOUÉ
2ebe0a929e Matter SmokeCoAlarm SelfTestRequest (#150497) 2025-08-12 19:10:55 +02:00
Manu
c1e5a7efc9 Add icons to Sleep as Android sensor entities (#150451) 2025-08-12 18:23:27 +02:00
dependabot[bot]
561ef7015c Bump actions/checkout from 4.2.2 to 5.0.0 (#150494)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-12 18:17:44 +02:00
Robin Lintermann
b4270e019e Bump pysmarlaapi to 0.9.2 (#150496) 2025-08-12 18:14:32 +02:00
Joost Lekkerkerker
614bf96fb9 Add model_id to Philips Hue (#150499) 2025-08-12 18:09:14 +02:00
Tucker Kern
ca290ee631 Implement Snapcast grouping with standard HA actions (#146855)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 18:07:29 +02:00
epenet
ad3174f6e6 Rename Tuya parsing models (#150498) 2025-08-12 18:02:26 +02:00
jan iversen
218b0738ca Modbus: Remove wrong comment on non-existing parameter. (#150501) 2025-08-12 18:00:51 +02:00
Joakim Plate
98e6e20079 Mock habluetooth adapters (#148919) 2025-08-12 10:46:31 -05:00
Norbert Rittel
89aa349881 Fix spelling of "an HS color command" in template (#150495) 2025-08-12 17:18:27 +02:00
wedsa5
07930b12d0 Fix brightness command not sent when in white color mode (#150439)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 16:36:52 +02:00
Renat Sibgatulin
711afa306c Add number platform for LED brightness to air-Q (#150492) 2025-08-12 15:39:28 +02:00
epenet
a3904ce60c Sort Tuya DPCodes alphabetically (#150477) 2025-08-12 15:28:42 +02:00
hanwg
455cf2fb42 Add notify platform for Telegram bot (#149853)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 15:24:13 +02:00
Aarni Koskela
072ae2b955 ruuvitag_ble: add new sensors (#150435)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-12 14:19:15 +02:00
epenet
2b70639b11 Add device registry snapshots to Tuya (#150482)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 14:04:36 +02:00
Åke Strandberg
2612dbeb9b Add missing boost2 code for Miele hobs (#150481) 2025-08-12 13:58:38 +02:00
Matrix
7ebdd24224 Bump yolink api to 0.5.8 (#150480) 2025-08-12 13:55:04 +02:00
Martin Hjelmare
66ff1cf005 Improve Z-Wave manual config flow step description (#150479) 2025-08-12 13:47:11 +02:00
David
08aae4bf49 Fix error of the Powerfox integration in combination with the new Powerfox FLOW adapter (#150429)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 12:45:21 +02:00
Etienne C.
313b5a483c Remove rounding of Waze duration sensor (#150424) 2025-08-12 12:20:48 +02:00
Arie Catsman
8edbcc92d3 Fix enphase_envoy non existing via device warning at first config. (#149010)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-12 11:55:43 +02:00
peteS-UK
067cab71fa Additional Fix error on startup when no Apps or Radio plugins are installed for Squeezebox (#150475) 2025-08-12 11:55:21 +02:00
Nippey
596e4883b1 Add more sensors to Tuya weather station (#150442)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 11:33:51 +02:00
yufeng
fb4a452872 Add supply frequency sensors to Tuya energy monitoring devices (#149320)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2025-08-12 11:02:03 +02:00
yufeng
5b232226e9 Add timers and switches to Tuya irrigation systems (#149236) 2025-08-12 10:53:08 +02:00
J. Nick Koston
db81610983 Bump aioesphomeapi to 38.2.1 (#150455) 2025-08-12 10:46:53 +02:00
epenet
8f5c8caf07 Add mute switch to Tuya smoke detectors (#150469) 2025-08-12 10:45:39 +02:00
Matrix
f6af524ddf Fix YoLink valve state when device running in class A mode (#150456) 2025-08-12 10:42:40 +02:00
Norbert Rittel
e0a8c9b458 Fix missing sentence-casing in somfy_mylink (#150463) 2025-08-12 10:30:38 +02:00
Cyrill Raccaud
c46412ee5b Bump cookidoo-api to 0.14.0 (#150450) 2025-08-12 09:51:39 +02:00
Mike Degatano
a06df2a680 Make disk_lifetime issue into a repair (#150140) 2025-08-12 08:39:37 +02:00
epenet
68fbcc8665 Add pymodbus to package constraints (#150419)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-12 00:50:05 +02:00
Manu
6cde5cfdcc Add diagnostics platform to Sleep as Android (#150447) 2025-08-11 23:47:07 +02:00
Wesley Vos
5605f5896a Remove the battery feature from supported features (#150101) 2025-08-11 23:26:27 +02:00
Manu
93c30f1b59 Add sensor platform to Sleep as Android (#150440) 2025-08-11 23:25:51 +02:00
Pete Sage
6e3ccbefc2 Add quality scale for Sonos (#144928) 2025-08-11 22:50:47 +02:00
Noah Husby
715dc12792 Add media browsing to Russound RIO (#148248) 2025-08-11 22:40:40 +02:00
Denis Shulyaka
9cae0e0acc OpenAI thinking content (#150340) 2025-08-11 22:28:36 +02:00
Kevin David
e13702d9b1 Bump python-snoo to 0.7.0 (#150434) 2025-08-11 22:25:41 +02:00
Tsvi Mostovicz
3b358df9e7 Jewish Calendar add coordinator (#141456)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-11 22:22:13 +02:00
Foscam-wangzhengyu
e394435d7c Add more Foscam switches (#147409)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-08-11 22:14:32 +02:00
Aarni Koskela
9e398ffc10 Bump to ruuvitag-ble==0.2.1 (#150436) 2025-08-11 22:05:44 +02:00
tdfountain
065a53a90d Add quality scale and set Platinum for NUT (#143269)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-08-11 21:27:33 +02:00
Manu
91f6b8e1fe Add Sleep as Android integration (#142569) 2025-08-11 20:03:37 +02:00
Michael Hansen
1a9d1a9649 Handle non-streaming TTS case correctly (#150218) 2025-08-11 11:47:29 -05:00
MB901
cb7c7767b5 Add model_id for Freebox integration (#150430) 2025-08-11 18:46:57 +02:00
CubeZ2mDeveloper
d02029143c Add SONOFF Dongle Lite MG21 discovery support in ZHA (#148813)
Co-authored-by: zetao.zheng <1050713479@qq.com>
2025-08-11 12:41:41 -04:00
Robin Lintermann
3eda687d30 Smarla integration sensor platform (#145748) 2025-08-11 17:08:07 +02:00
Jamie Magee
7688c367cc Remove coinbase v2 API support (#148387)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-08-11 16:58:36 +02:00
Manu
a1dc3f3eac Bump habiticalib to version 0.4.2 (#150417) 2025-08-11 15:51:22 +02:00
Martin Hjelmare
d135d08813 Lower Z-Wave firmware check delay (#150411) 2025-08-11 14:09:04 +02:00
Brett Adams
9595759fd1 Add stale device cleanup to Teslemetry (#144523) 2025-08-11 13:54:44 +02:00
Etienne C.
d54f979612 Add a coordinator to Waze Travel Time (#148585) 2025-08-11 13:20:18 +02:00
Paulus Schoutsen
531073acc0 Allow specifying multiple integrations (#150349) 2025-08-11 13:12:29 +02:00
Bouwe Westerdijk
73cbc962f9 Implement snapshot testing for Plugwise binary_sensor platform (#150375) 2025-08-11 13:11:24 +02:00
epenet
34b0b71375 Add Tuya snapshot tests for empty electricity RAW sensors (#150407) 2025-08-11 12:05:33 +02:00
Brett Adams
203c908730 Add charging and preconditioning actions to Teslemetry (#144184) 2025-08-11 11:59:39 +02:00
tronikos
23e6148d3b Create an issue if Opower utility is no longer supported (#150315) 2025-08-11 11:58:12 +02:00
epenet
2a5a66f9d5 Handle empty electricity RAW sensors in Tuya (#150406) 2025-08-11 11:55:47 +02:00
dependabot[bot]
84ce5d65e1 Bump github/codeql-action from 3.29.7 to 3.29.8 (#150405)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-11 11:50:00 +02:00
524 changed files with 33553 additions and 3693 deletions


@@ -27,7 +27,7 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
with:
fetch-depth: 0
@@ -90,7 +90,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -242,7 +242,7 @@ jobs:
- green
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set build additional args
run: |
@@ -279,7 +279,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -321,7 +321,7 @@ jobs:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Install Cosign
uses: sigstore/cosign-installer@v3.9.2
@@ -454,7 +454,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
@@ -499,7 +499,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0


@@ -37,7 +37,7 @@ on:
type: boolean
env:
- CACHE_VERSION: 4
+ CACHE_VERSION: 5
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.9"
@@ -94,7 +94,7 @@ jobs:
runs-on: ubuntu-24.04
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
@@ -246,7 +246,7 @@ jobs:
- info
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -292,7 +292,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -332,7 +332,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -372,7 +372,7 @@ jobs:
- pre-commit
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0
id: python
@@ -462,7 +462,7 @@ jobs:
- script/hassfest/docker/Dockerfile
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -481,7 +481,7 @@ jobs:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -584,7 +584,7 @@ jobs:
sudo apt-get -y install \
libturbojpeg
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -617,7 +617,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -651,7 +651,7 @@ jobs:
&& github.event_name == 'pull_request'
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Dependency review
uses: actions/dependency-review-action@v4.7.1
with:
@@ -674,7 +674,7 @@ jobs:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -717,7 +717,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -764,7 +764,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -809,7 +809,7 @@ jobs:
- base
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -886,7 +886,7 @@ jobs:
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.6.0
@@ -947,7 +947,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1080,7 +1080,7 @@ jobs:
libmariadb-dev-compat \
libxml2-utils
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1222,7 +1222,7 @@ jobs:
sudo apt-get -y install \
postgresql-server-dev-14
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1334,7 +1334,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0
with:
@@ -1381,7 +1381,7 @@ jobs:
libgammu-dev \
libxml2-utils
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.6.0
@@ -1484,7 +1484,7 @@ jobs:
timeout-minutes: 10
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Download all coverage artifacts
uses: actions/download-artifact@v5.0.0
with:


@@ -21,14 +21,14 @@ jobs:
steps:
- name: Check out code from GitHub
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Initialize CodeQL
- uses: github/codeql-action/init@v3.29.7
+ uses: github/codeql-action/init@v3.29.9
with:
languages: python
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v3.29.7
+ uses: github/codeql-action/analyze@v3.29.9
with:
category: "/language:python"


@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
- uses: actions/ai-inference@v1.2.8
+ uses: actions/ai-inference@v2.0.0
with:
model: openai/gpt-4o
system-prompt: |


@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
- uses: actions/ai-inference@v1.2.8
+ uses: actions/ai-inference@v2.0.0
with:
model: openai/gpt-4o-mini
system-prompt: |


@@ -9,7 +9,7 @@ jobs:
check-authorization:
runs-on: ubuntu-latest
# Only run if this is a Task issue type (from the issue form)
- if: github.event.issue.issue_type == 'Task'
+ if: github.event.issue.type.name == 'Task'
steps:
- name: Check if user is authorized
uses: actions/github-script@v7


@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.6.0


@@ -32,7 +32,7 @@ jobs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -135,7 +135,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Download env_file
uses: actions/download-artifact@v5.0.0
@@ -184,7 +184,7 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
- uses: actions/checkout@v4.2.2
+ uses: actions/checkout@v5.0.0
- name: Download env_file
uses: actions/download-artifact@v5.0.0


@@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v5.0.0
+ rev: v6.0.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]


@@ -466,6 +466,7 @@ homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
+ homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*

CODEOWNERS (generated)

@@ -156,8 +156,8 @@ build.json @home-assistant/supervisor
/tests/components/assist_pipeline/ @balloob @synesthesiam
/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
- /homeassistant/components/asuswrt/ @kennedyshead @ollo69
- /tests/components/asuswrt/ @kennedyshead @ollo69
+ /homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
+ /tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
/homeassistant/components/atag/ @MatsNL
/tests/components/atag/ @MatsNL
/homeassistant/components/aten_pe/ @mtdcr
@@ -438,8 +438,8 @@ build.json @home-assistant/supervisor
/tests/components/enigma2/ @autinerd
/homeassistant/components/enocean/ @bdurrer
/tests/components/enocean/ @bdurrer
- /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
- /tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
+ /homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
+ /tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
/homeassistant/components/entur_public_transport/ @hfurubotten
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
@@ -1415,6 +1415,8 @@ build.json @home-assistant/supervisor
/tests/components/skybell/ @tkdrob
/homeassistant/components/slack/ @tkdrob @fletcherau
/tests/components/slack/ @tkdrob @fletcherau
+ /homeassistant/components/sleep_as_android/ @tr4nt0r
+ /tests/components/sleep_as_android/ @tr4nt0r
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73

Dockerfile (generated)

@@ -31,7 +31,7 @@ RUN \
&& go2rtc --version
# Install uv
- RUN pip3 install uv==0.7.1
+ RUN pip3 install uv==0.8.9
WORKDIR /usr/src


@@ -6,5 +6,5
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
- "requirements": ["airos==0.2.7"]
+ "requirements": ["airos==0.3.0"]
}


@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
import logging
- from airos.data import NetRole, WirelessMode
+ from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -19,6 +19,8 @@ from homeassistant.const import (
SIGNAL_STRENGTH_DECIBELS,
UnitOfDataRate,
UnitOfFrequency,
+ UnitOfLength,
+ UnitOfTime,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -29,8 +31,11 @@ from .entity import AirOSEntity
_LOGGER = logging.getLogger(__name__)
- WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
NETROLE_OPTIONS = [mode.value for mode in NetRole]
+ WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
+ WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
@@ -118,6 +123,41 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
value_fn=lambda data: data.wireless.polling.ul_capacity,
),
AirOSSensorEntityDescription(
key="host_uptime",
translation_key="host_uptime",
native_unit_of_measurement=UnitOfTime.SECONDS,
device_class=SensorDeviceClass.DURATION,
suggested_display_precision=0,
suggested_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda data: data.host.uptime,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_distance",
translation_key="wireless_distance",
native_unit_of_measurement=UnitOfLength.METERS,
device_class=SensorDeviceClass.DISTANCE,
suggested_display_precision=1,
suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
value_fn=lambda data: data.wireless.distance,
),
AirOSSensorEntityDescription(
key="wireless_mode",
translation_key="wireless_mode",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.mode.value,
options=WIRELESS_MODE_OPTIONS,
entity_registry_enabled_default=False,
),
AirOSSensorEntityDescription(
key="wireless_role",
translation_key="wireless_role",
device_class=SensorDeviceClass.ENUM,
value_fn=lambda data: data.derived.role.value,
options=WIRELESS_ROLE_OPTIONS,
entity_registry_enabled_default=False,
),
)


@@ -77,6 +77,26 @@
},
"wireless_remote_hostname": {
"name": "Remote hostname"
},
"host_uptime": {
"name": "Uptime"
},
"wireless_distance": {
"name": "Wireless distance"
},
"wireless_role": {
"name": "Wireless role",
"state": {
"access_point": "Access point",
"station": "Station"
}
},
"wireless_mode": {
"name": "Wireless mode",
"state": {
"point_to_point": "Point-to-point",
"point_to_multipoint": "Point-to-multipoint"
}
}
}
},


@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
from .coordinator import AirQCoordinator
- PLATFORMS: list[Platform] = [Platform.SENSOR]
+ PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
AirQConfigEntry = ConfigEntry[AirQCoordinator]


@@ -75,6 +75,7 @@ class AirQCoordinator(DataUpdateCoordinator):
return_average=self.return_average,
clip_negative_values=self.clip_negative,
)
+ data["brightness"] = await self.airq.get_current_brightness()
if warming_up_sensors := identify_warming_up_sensors(data):
_LOGGER.debug(
"Following sensors are still warming up: %s", warming_up_sensors


@@ -0,0 +1,85 @@
"""Definition of air-Q number platform used to control the LED strips."""
from __future__ import annotations
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from aioairq.core import AirQ
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.const import PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import AirQConfigEntry, AirQCoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class AirQBrightnessDescription(NumberEntityDescription):
"""Describes AirQ number entity responsible for brightness control."""
value: Callable[[dict], float]
set_value: Callable[[AirQ, float], Awaitable[None]]
AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
key="airq_led_brightness",
translation_key="airq_led_brightness",
native_min_value=0.0,
native_max_value=100.0,
native_step=1.0,
native_unit_of_measurement=PERCENTAGE,
value=lambda data: data["brightness"],
set_value=lambda device, value: device.set_current_brightness(value),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AirQConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up number entities: a single entity for the LEDs."""
coordinator = entry.runtime_data
entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
async_add_entities(entities)
class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
"""Representation of the LEDs from a single AirQ."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AirQCoordinator,
description: AirQBrightnessDescription,
) -> None:
"""Initialize a single sensor."""
super().__init__(coordinator)
self.entity_description: AirQBrightnessDescription = description
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
@property
def native_value(self) -> float:
"""Return the brightness of the LEDs in %."""
return self.entity_description.value(self.coordinator.data)
async def async_set_native_value(self, value: float) -> None:
"""Set the brightness of the LEDs to the value in %."""
_LOGGER.debug(
"Changing LED brighntess from %.0f%% to %.0f%%",
self.coordinator.data["brightness"],
value,
)
await self.entity_description.set_value(self.coordinator.airq, value)
await self.coordinator.async_request_refresh()


@@ -35,6 +35,11 @@
}
},
"entity": {
"number": {
"airq_led_brightness": {
"name": "LED brightness"
}
},
"sensor": {
"acetaldehyde": {
"name": "Acetaldehyde"


@@ -5,7 +5,7 @@ from __future__ import annotations
from aioambient.util import get_public_device_id
from homeassistant.core import callback
- from homeassistant.helpers.device_registry import DeviceInfo
+ from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, EntityDescription
@@ -37,6 +37,7 @@ class AmbientWeatherEntity(Entity):
identifiers={(DOMAIN, mac_address)},
manufacturer="Ambient Weather",
name=station_name.capitalize(),
+ connections={(CONNECTION_NETWORK_MAC, mac_address)},
)
self._attr_unique_id = f"{mac_address}_{description.key}"


@@ -390,7 +390,6 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
async def async_devices_payload(hass: HomeAssistant) -> dict:
"""Return the devices payload."""
integrations_without_model_id: set[str] = set()
devices: list[dict[str, Any]] = []
dev_reg = dr.async_get(hass)
# Devices that need via device info set
@@ -400,10 +399,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations = set()
for device in dev_reg.devices.values():
# Ignore services
if device.entry_type:
continue
if not device.primary_config_entry:
continue
@@ -414,13 +409,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
seen_integrations.add(config_entry.domain)
if not device.model_id:
integrations_without_model_id.add(config_entry.domain)
continue
if not device.manufacturer:
continue
new_indexes[device.id] = len(devices)
devices.append(
{
@@ -432,8 +420,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
"hw_version": device.hw_version,
"has_configuration_url": device.configuration_url is not None,
"via_device": None,
"entry_type": device.entry_type.value if device.entry_type else None,
}
)
if device.via_device_id:
via_devices[device.id] = device.via_device_id
@@ -453,15 +443,11 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
for device_info in devices:
if integration := integrations.get(device_info["integration"]):
device_info["is_custom_integration"] = not integration.is_built_in
# Include version for custom integrations
if not integration.is_built_in and integration.version:
device_info["custom_integration_version"] = str(integration.version)
return {
"version": "home-assistant:1",
"no_model_id": sorted(
[
domain
for domain in integrations_without_model_id
if domain in integrations and integrations[domain].is_built_in
]
),
"devices": devices,
}


@@ -30,10 +30,9 @@ class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
cam: PyDroidIPCam,
) -> None:
"""Initialize the Android IP Webcam."""
- self.hass = hass
self.cam = cam
super().__init__(
- self.hass,
+ hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} {config_entry.data[CONF_HOST]}",


@@ -2,11 +2,10 @@
from collections.abc import AsyncGenerator, Callable, Iterable
import json
- from typing import Any, cast
+ from typing import Any
import anthropic
from anthropic import AsyncStream
from anthropic._types import NOT_GIVEN
from anthropic.types import (
InputJSONDelta,
MessageDeltaUsage,
@@ -17,7 +16,6 @@ from anthropic.types import (
RawContentBlockStopEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RedactedThinkingBlock,
RedactedThinkingBlockParam,
SignatureDelta,
@@ -35,6 +33,7 @@ from anthropic.types import (
ToolUseBlockParam,
Usage,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
from voluptuous_openapi import convert
from homeassistant.components import conversation
@@ -129,6 +128,28 @@ def _convert_content(
)
)
if isinstance(content.native, ThinkingBlock):
messages[-1]["content"].append( # type: ignore[union-attr]
ThinkingBlockParam(
type="thinking",
thinking=content.thinking_content or "",
signature=content.native.signature,
)
)
elif isinstance(content.native, RedactedThinkingBlock):
redacted_thinking_block = RedactedThinkingBlockParam(
type="redacted_thinking",
data=content.native.data,
)
if isinstance(messages[-1]["content"], str):
messages[-1]["content"] = [
TextBlockParam(type="text", text=messages[-1]["content"]),
redacted_thinking_block,
]
else:
messages[-1]["content"].append( # type: ignore[attr-defined]
redacted_thinking_block
)
if content.content:
messages[-1]["content"].append( # type: ignore[union-attr]
TextBlockParam(type="text", text=content.content)
@@ -152,10 +173,9 @@ def _convert_content(
return messages
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
async def _transform_stream(
chat_log: conversation.ChatLog,
result: AsyncStream[MessageStreamEvent],
messages: list[MessageParam],
stream: AsyncStream[MessageStreamEvent],
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
"""Transform the response stream into HA format.
@@ -186,31 +206,25 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
Each message could contain multiple blocks of the same type.
"""
if result is None:
if stream is None:
raise TypeError("Expected a stream of messages")
current_message: MessageParam | None = None
current_block: (
TextBlockParam
| ToolUseBlockParam
| ThinkingBlockParam
| RedactedThinkingBlockParam
| None
) = None
current_tool_block: ToolUseBlockParam | None = None
current_tool_args: str
input_usage: Usage | None = None
has_content = False
has_native = False
async for response in result:
async for response in stream:
LOGGER.debug("Received response: %s", response)
if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
current_message = MessageParam(role=response.message.role, content=[])
input_usage = response.message.usage
elif isinstance(response, RawContentBlockStartEvent):
if isinstance(response.content_block, ToolUseBlock):
current_block = ToolUseBlockParam(
current_tool_block = ToolUseBlockParam(
type="tool_use",
id=response.content_block.id,
name=response.content_block.name,
@@ -218,75 +232,64 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
)
current_tool_args = ""
elif isinstance(response.content_block, TextBlock):
current_block = TextBlockParam(
type="text", text=response.content_block.text
)
yield {"role": "assistant"}
if has_content:
yield {"role": "assistant"}
has_native = False
has_content = True
if response.content_block.text:
yield {"content": response.content_block.text}
elif isinstance(response.content_block, ThinkingBlock):
current_block = ThinkingBlockParam(
type="thinking",
thinking=response.content_block.thinking,
signature=response.content_block.signature,
)
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
elif isinstance(response.content_block, RedactedThinkingBlock):
current_block = RedactedThinkingBlockParam(
type="redacted_thinking", data=response.content_block.data
)
LOGGER.debug(
"Some of Claude's internal reasoning has been automatically "
"encrypted for safety reasons. This doesn't affect the quality of "
"responses"
)
if has_native:
yield {"role": "assistant"}
has_native = False
has_content = False
yield {"native": response.content_block}
has_native = True
elif isinstance(response, RawContentBlockDeltaEvent):
if current_block is None:
raise ValueError("Unexpected delta without a block")
if isinstance(response.delta, InputJSONDelta):
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
text_block = cast(TextBlockParam, current_block)
text_block["text"] += response.delta.text
yield {"content": response.delta.text}
elif isinstance(response.delta, ThinkingDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["thinking"] += response.delta.thinking
yield {"thinking_content": response.delta.thinking}
elif isinstance(response.delta, SignatureDelta):
thinking_block = cast(ThinkingBlockParam, current_block)
thinking_block["signature"] += response.delta.signature
yield {
"native": ThinkingBlock(
type="thinking",
thinking="",
signature=response.delta.signature,
)
}
has_native = True
elif isinstance(response, RawContentBlockStopEvent):
if current_block is None:
raise ValueError("Unexpected stop event without a current block")
if current_block["type"] == "tool_use":
# tool block
if current_tool_block is not None:
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_block["input"] = tool_args
current_tool_block["input"] = tool_args
yield {
"tool_calls": [
llm.ToolInput(
id=current_block["id"],
tool_name=current_block["name"],
id=current_tool_block["id"],
tool_name=current_tool_block["name"],
tool_args=tool_args,
)
]
}
elif current_block["type"] == "thinking":
# thinking block
LOGGER.debug("Thinking: %s", current_block["thinking"])
if current_message is None:
raise ValueError("Unexpected stop event without a current message")
current_message["content"].append(current_block) # type: ignore[union-attr]
current_block = None
current_tool_block = None
elif isinstance(response, RawMessageDeltaEvent):
if (usage := response.usage) is not None:
chat_log.async_trace(_create_token_stats(input_usage, usage))
if response.delta.stop_reason == "refusal":
raise HomeAssistantError("Potential policy violation detected")
elif isinstance(response, RawMessageStopEvent):
if current_message is not None:
messages.append(current_message)
current_message = None
def _create_token_stats(
@@ -351,48 +354,48 @@ class AnthropicBaseLLMEntity(Entity):
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_args = MessageCreateParamsStreaming(
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
stream=True,
)
if tools:
model_args["tools"] = tools
if (
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
model_args = {
"model": model,
"messages": messages,
"tools": tools or NOT_GIVEN,
"max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
"system": system.content,
"stream": True,
}
if (
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(
type="enabled", budget_tokens=thinking_budget
)
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
try:
stream = await client.messages.create(**model_args)
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream),
)
]
)
)
except anthropic.AnthropicError as err:
raise HomeAssistantError(
f"Sorry, I had a problem talking to Anthropic: {err}"
) from err
messages.extend(
_convert_content(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(chat_log, stream, messages),
)
if not isinstance(content, conversation.AssistantContent)
]
)
)
if not chat_log.unresponded_tool_results:
break


@@ -7,10 +7,7 @@ rules:
status: done
comment: |
Consider deriving a base entity.
- config-flow-test-coverage:
- status: done
- comment: |
- Consider looking into making a `mock_setup_entry` fixture that just automatically do this.
+ config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:


@@ -11,7 +11,7 @@ import time
from typing import Any, Literal, final
from hassil import Intents, recognize
- from hassil.expression import Expression, ListReference, Sequence
+ from hassil.expression import Expression, Group, ListReference
from hassil.intents import WildcardSlotList
from homeassistant.components import conversation, media_source, stt, tts
@@ -413,7 +413,7 @@ class AssistSatelliteEntity(entity.Entity):
for intent in intents.intents.values():
for intent_data in intent.data:
for sentence in intent_data.sentences:
- _collect_list_references(sentence, wildcard_names)
+ _collect_list_references(sentence.expression, wildcard_names)
for wildcard_name in wildcard_names:
intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -727,9 +727,9 @@ class AssistSatelliteEntity(entity.Entity):
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
- if isinstance(expression, Sequence):
- seq: Sequence = expression
- for item in seq.items:
+ if isinstance(expression, Group):
+ grp: Group = expression
+ for item in grp.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}


@@ -6,5 +6,5
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
"integration_type": "entity",
"quality_scale": "internal",
- "requirements": ["hassil==2.2.3"]
+ "requirements": ["hassil==3.1.0"]
}


@@ -5,15 +5,16 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections import namedtuple
from collections.abc import Awaitable, Callable, Coroutine
from datetime import datetime
import functools
import logging
from typing import Any, cast
from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
from aiohttp import ClientSession
from pyasuswrt import AsusWrtError, AsusWrtHttp
from pyasuswrt.exceptions import AsusWrtNotAvailableInfoError
from asusrouter import AsusRouter, AsusRouterError
from asusrouter.modules.client import AsusClient
from asusrouter.modules.data import AsusData
from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
from homeassistant.const import (
CONF_HOST,
@@ -41,14 +42,13 @@ from .const import (
PROTOCOL_HTTPS,
PROTOCOL_TELNET,
SENSORS_BYTES,
SENSORS_CPU,
SENSORS_LOAD_AVG,
SENSORS_MEMORY,
SENSORS_RATES,
SENSORS_TEMPERATURES,
SENSORS_TEMPERATURES_LEGACY,
SENSORS_UPTIME,
)
from .helpers import clean_dict, translate_to_legacy
SENSORS_TYPE_BYTES = "sensors_bytes"
SENSORS_TYPE_COUNT = "sensors_count"
@@ -310,16 +310,16 @@ class AsusWrtHttpBridge(AsusWrtBridge):
def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
"""Initialize Bridge that use HTTP library."""
super().__init__(conf[CONF_HOST])
- self._api: AsusWrtHttp = self._get_api(conf, session)
+ self._api = self._get_api(conf, session)
@staticmethod
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
"""Get the AsusWrtHttp API."""
return AsusWrtHttp(
conf[CONF_HOST],
conf[CONF_USERNAME],
conf.get(CONF_PASSWORD, ""),
use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
"""Get the AsusRouter API."""
return AsusRouter(
hostname=conf[CONF_HOST],
username=conf[CONF_USERNAME],
password=conf.get(CONF_PASSWORD, ""),
use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
port=conf.get(CONF_PORT),
session=session,
)
@@ -327,46 +327,90 @@ class AsusWrtHttpBridge(AsusWrtBridge):
@property
def is_connected(self) -> bool:
"""Get connected status."""
- return cast(bool, self._api.is_connected)
+ return self._api.connected
async def async_connect(self) -> None:
"""Connect to the device."""
await self._api.async_connect()
# Collect the identity
_identity = await self._api.async_get_identity()
# get main router properties
- if mac := self._api.mac:
+ if mac := _identity.mac:
self._label_mac = format_mac(mac)
- self._firmware = self._api.firmware
- self._model = self._api.model
+ self._firmware = str(_identity.firmware)
+ self._model = _identity.model
async def async_disconnect(self) -> None:
"""Disconnect to the device."""
await self._api.async_disconnect()
async def _get_data(
self,
datatype: AsusData,
force: bool = False,
) -> dict[str, Any]:
"""Get data from the device.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
try:
raw = await self._api.async_get_data(datatype, force=force)
return translate_to_legacy(clean_dict(convert_to_ha_data(raw)))
except AsusRouterError as ex:
raise UpdateFailed(ex) from ex
async def _get_sensors(self, datatype: AsusData) -> list[str]:
"""Get the available sensors.
This is a generic method which automatically converts to
the Home Assistant-compatible format.
"""
sensors = []
try:
data = await self._api.async_get_data(datatype)
# Get the list of sensors from the raw data
# and translate in to the legacy format
sensors = translate_to_legacy(convert_to_ha_sensors(data, datatype))
_LOGGER.debug("Available `%s` sensors: %s", datatype.value, sensors)
except AsusRouterError as ex:
_LOGGER.warning(
"Cannot get available `%s` sensors with exception: %s",
datatype.value,
ex,
)
return sensors
async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
"""Get list of connected devices."""
api_devices = await self._api.async_get_connected_devices()
api_devices: dict[str, AsusClient] = await self._api.async_get_data(
AsusData.CLIENTS, force=True
)
return {
format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
format_mac(mac): WrtDevice(
dev.connection.ip_address, dev.description.name, dev.connection.node
)
for mac, dev in api_devices.items()
if dev.connection is not None
and dev.description is not None
and dev.connection.ip_address is not None
}
async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
"""Return a dictionary of available sensors for this bridge."""
sensors_cpu = await self._get_available_cpu_sensors()
sensors_temperatures = await self._get_available_temperature_sensors()
sensors_loadavg = await self._get_loadavg_sensors_availability()
return {
SENSORS_TYPE_BYTES: {
KEY_SENSORS: SENSORS_BYTES,
KEY_METHOD: self._get_bytes,
},
SENSORS_TYPE_CPU: {
KEY_SENSORS: sensors_cpu,
KEY_SENSORS: await self._get_sensors(AsusData.CPU),
KEY_METHOD: self._get_cpu_usage,
},
SENSORS_TYPE_LOAD_AVG: {
KEY_SENSORS: sensors_loadavg,
KEY_SENSORS: await self._get_sensors(AsusData.SYSINFO),
KEY_METHOD: self._get_load_avg,
},
SENSORS_TYPE_MEMORY: {
@@ -382,95 +426,44 @@ class AsusWrtHttpBridge(AsusWrtBridge):
KEY_METHOD: self._get_uptime,
},
SENSORS_TYPE_TEMPERATURES: {
KEY_SENSORS: sensors_temperatures,
KEY_SENSORS: await self._get_sensors(AsusData.TEMPERATURE),
KEY_METHOD: self._get_temperatures,
},
}
async def _get_available_cpu_sensors(self) -> list[str]:
"""Check which cpu information is available on the router."""
try:
available_cpu = await self._api.async_get_cpu_usage()
available_sensors = [t for t in SENSORS_CPU if t in available_cpu]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking cpu sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_available_temperature_sensors(self) -> list[str]:
"""Check which temperature information is available on the router."""
try:
available_temps = await self._api.async_get_temperatures()
available_sensors = [
t for t in SENSORS_TEMPERATURES if t in available_temps
]
except AsusWrtError as exc:
_LOGGER.warning(
(
"Failed checking temperature sensor availability for ASUS router"
" %s. Exception: %s"
),
self.host,
exc,
)
return []
return available_sensors
async def _get_loadavg_sensors_availability(self) -> list[str]:
"""Check if load avg is available on the router."""
try:
await self._api.async_get_loadavg()
except AsusWrtNotAvailableInfoError:
return []
except AsusWrtError:
pass
return SENSORS_LOAD_AVG
@handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
async def _get_bytes(self) -> Any:
"""Fetch byte information from the router."""
return await self._api.async_get_traffic_bytes()
return await self._get_data(AsusData.NETWORK)
@handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
async def _get_rates(self) -> Any:
"""Fetch rates information from the router."""
return await self._api.async_get_traffic_rates()
data = await self._get_data(AsusData.NETWORK)
# Convert from bits/s to Bytes/s for compatibility with legacy sensors
return {
key: (
value / 8
if key in SENSORS_RATES and isinstance(value, (int, float))
else value
)
for key, value in data.items()
}
@handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
async def _get_load_avg(self) -> Any:
"""Fetch cpu load avg information from the router."""
return await self._api.async_get_loadavg()
return await self._get_data(AsusData.SYSINFO)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_temperatures(self) -> Any:
"""Fetch temperatures information from the router."""
return await self._api.async_get_temperatures()
return await self._get_data(AsusData.TEMPERATURE)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_cpu_usage(self) -> Any:
"""Fetch cpu information from the router."""
return await self._api.async_get_cpu_usage()
return await self._get_data(AsusData.CPU)
@handle_errors_and_zip(AsusWrtError, None)
async def _get_memory_usage(self) -> Any:
"""Fetch memory information from the router."""
return await self._api.async_get_memory_usage()
return await self._get_data(AsusData.RAM)
async def _get_uptime(self) -> dict[str, Any]:
"""Fetch uptime from the router."""
try:
uptimes = await self._api.async_get_uptime()
except AsusWrtError as exc:
raise UpdateFailed(exc) from exc
last_boot = datetime.fromisoformat(uptimes["last_boot"])
uptime = uptimes["uptime"]
return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False))
return await self._get_data(AsusData.BOOTTIME)
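As an aside, the bits-per-second to bytes-per-second conversion in `_get_rates` above is plain arithmetic. A minimal standalone sketch follows; the contents of SENSORS_RATES are an assumption made for this example only and are not taken from the diff:

# Minimal sketch of the rate conversion used in _get_rates above.
SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"]  # assumed for illustration

data = {"sensor_rx_rates": 8_000_000, "sensor_tx_rates": 1_600_000, "uptime": 42}
converted = {
    key: (
        value / 8
        if key in SENSORS_RATES and isinstance(value, (int, float))
        else value
    )
    for key, value in data.items()
}
# converted == {"sensor_rx_rates": 1_000_000.0, "sensor_tx_rates": 200_000.0, "uptime": 42}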

View File

@@ -7,7 +7,7 @@ import os
import socket
from typing import Any, cast
from pyasuswrt import AsusWrtError
from asusrouter import AsusRouterError
import voluptuous as vol
from homeassistant.components.device_tracker import (
@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
try:
await api.async_connect()
except (AsusWrtError, OSError):
except (AsusRouterError, OSError):
_LOGGER.error(
"Error connecting to the AsusWrt router at %s using protocol %s",
host,

View File

@@ -0,0 +1,56 @@
"""Helpers for AsusWRT integration."""
from __future__ import annotations
from typing import Any, TypeVar
T = TypeVar("T", dict[str, Any], list[Any], None)
TRANSLATION_MAP = {
"wan_rx": "sensor_rx_bytes",
"wan_tx": "sensor_tx_bytes",
"total_usage": "cpu_total_usage",
"usage": "mem_usage_perc",
"free": "mem_free",
"used": "mem_used",
"wan_rx_speed": "sensor_rx_rates",
"wan_tx_speed": "sensor_tx_rates",
"2ghz": "2.4GHz",
"5ghz": "5.0GHz",
"5ghz2": "5.0GHz_2",
"6ghz": "6.0GHz",
"cpu": "CPU",
"datetime": "sensor_last_boot",
"uptime": "sensor_uptime",
**{f"{num}_usage": f"cpu{num}_usage" for num in range(1, 9)},
**{f"load_avg_{load}": f"sensor_load_avg{load}" for load in ("1", "5", "15")},
}
def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
"""Cleans dictionary from None values.
The `state` key is always preserved regardless of its value.
"""
return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
def translate_to_legacy(raw: T) -> T:
"""Translate raw data to legacy format for dicts and lists."""
if raw is None:
return None
if isinstance(raw, dict):
return {TRANSLATION_MAP.get(k, k): v for k, v in raw.items()}
if isinstance(raw, list):
return [
TRANSLATION_MAP[item]
if isinstance(item, str) and item in TRANSLATION_MAP
else item
for item in raw
]
return raw
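For orientation, a minimal sketch (not part of the change) of how the two helpers above behave, assuming the module lands at homeassistant.components.asuswrt.helpers:

from homeassistant.components.asuswrt.helpers import clean_dict, translate_to_legacy

raw = {"wan_rx": 123456, "wan_tx": None, "cpu_state": None, "load_avg_1": 0.42}

cleaned = clean_dict(raw)
# {"wan_rx": 123456, "cpu_state": None, "load_avg_1": 0.42}
# None values are dropped, but keys ending in "state" are kept.

legacy = translate_to_legacy(cleaned)
# {"sensor_rx_bytes": 123456, "cpu_state": None, "sensor_load_avg1": 0.42}
# Keys without a mapping (such as "cpu_state") pass through unchanged.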

View File

@@ -1,11 +1,11 @@
{
"domain": "asuswrt",
"name": "ASUSWRT",
"codeowners": ["@kennedyshead", "@ollo69"],
"codeowners": ["@kennedyshead", "@ollo69", "@Vaskivskyi"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/asuswrt",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aioasuswrt", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.21"]
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.19.0"]
}

View File

@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, Any
from pyasuswrt import AsusWrtError
from asusrouter import AsusRouterError
from homeassistant.components.device_tracker import (
CONF_CONSIDER_HOME,
@@ -229,7 +229,7 @@ class AsusWrtRouter:
"""Set up a AsusWrt router."""
try:
await self._api.async_connect()
except (AsusWrtError, OSError) as exc:
except (AsusRouterError, OSError) as exc:
raise ConfigEntryNotReady from exc
if not self._api.is_connected:
raise ConfigEntryNotReady
@@ -284,7 +284,7 @@ class AsusWrtRouter:
_LOGGER.debug("Checking devices for ASUS router %s", self.host)
try:
wrt_devices = await self._api.async_get_connected_devices()
except (OSError, AsusWrtError) as exc:
except (OSError, AsusRouterError) as exc:
if not self._connect_error:
self._connect_error = True
_LOGGER.error(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/blue_current",
"iot_class": "cloud_push",
"loggers": ["bluecurrent_api"],
"requirements": ["bluecurrent-api==1.2.4"]
"requirements": ["bluecurrent-api==1.3.1"]
}

View File

@@ -16,7 +16,7 @@
"quality_scale": "internal",
"requirements": [
"bleak==1.0.1",
"bleak-retry-connector==4.0.0",
"bleak-retry-connector==4.0.1",
"bluetooth-adapters==2.0.0",
"bluetooth-auto-recovery==1.5.2",
"bluetooth-data-tools==1.28.2",

View File

@@ -69,12 +69,7 @@ class SHCEntity(SHCBaseEntity):
manufacturer=device.manufacturer,
model=device.device_model,
name=device.name,
via_device=(
DOMAIN,
device.parent_device_id
if device.parent_device_id is not None
else parent_id,
),
via_device=(DOMAIN, device.root_device_id),
)
super().__init__(device=device, parent_id=parent_id, entry_id=entry_id)

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/bosch_shc",
"iot_class": "local_push",
"loggers": ["boschshcpy"],
"requirements": ["boschshcpy==0.2.91"],
"requirements": ["boschshcpy==0.2.107"],
"zeroconf": [
{
"type": "_http._tcp.local.",

View File

@@ -53,8 +53,7 @@ async def async_setup_entry(
assert unique_id is not None
async_add_entities(
BraviaTVButton(coordinator, unique_id, config_entry.title, description)
for description in BUTTONS
BraviaTVButton(coordinator, unique_id, description) for description in BUTTONS
)
@@ -67,11 +66,10 @@ class BraviaTVButton(BraviaTVEntity, ButtonEntity):
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
description: BraviaTVButtonDescription,
) -> None:
"""Initialize the button."""
super().__init__(coordinator, unique_id, model)
super().__init__(coordinator, unique_id)
self._attr_unique_id = f"{unique_id}_{description.key}"
self.entity_description = description

View File

@@ -79,14 +79,16 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
system_info = await self.client.get_system_info()
cid = system_info[ATTR_CID].lower()
title = system_info[ATTR_MODEL]
self.device_config[CONF_MAC] = system_info[ATTR_MAC]
await self.async_set_unique_id(cid)
self._abort_if_unique_id_configured()
return self.async_create_entry(title=title, data=self.device_config)
return self.async_create_entry(
title=f"{system_info['name']} {system_info[ATTR_MODEL]}",
data=self.device_config,
)
async def async_reauth_device(self) -> ConfigFlowResult:
"""Reauthorize Bravia TV device from config."""

View File

@@ -81,6 +81,7 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.use_psk = config_entry.data.get(CONF_USE_PSK, False)
self.client_id = config_entry.data.get(CONF_CLIENT_ID, LEGACY_CLIENT_ID)
self.nickname = config_entry.data.get(CONF_NICKNAME, NICKNAME_PREFIX)
self.system_info: dict[str, str] = {}
self.source: str | None = None
self.source_list: list[str] = []
self.source_map: dict[str, dict] = {}
@@ -150,6 +151,9 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
self.is_on = power_status == "active"
self.skipped_updates = 0
if not self.system_info:
self.system_info = await self.client.get_system_info()
if self.is_on is False:
return

View File

@@ -12,23 +12,16 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]):
_attr_has_entity_name = True
def __init__(
self,
coordinator: BraviaTVCoordinator,
unique_id: str,
model: str,
) -> None:
def __init__(self, coordinator: BraviaTVCoordinator, unique_id: str) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
connections={(CONNECTION_NETWORK_MAC, coordinator.system_info["macAddr"])},
manufacturer=ATTR_MANUFACTURER,
model=model,
name=f"{ATTR_MANUFACTURER} {model}",
model_id=coordinator.system_info["model"],
hw_version=coordinator.system_info["generation"],
serial_number=coordinator.system_info["serial"],
)
if coordinator.client.mac is not None:
self._attr_device_info["connections"] = {
(CONNECTION_NETWORK_MAC, coordinator.client.mac)
}

View File

@@ -34,9 +34,7 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities(
[BraviaTVMediaPlayer(coordinator, unique_id, config_entry.title)]
)
async_add_entities([BraviaTVMediaPlayer(coordinator, unique_id)])
class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):

View File

@@ -24,7 +24,7 @@ async def async_setup_entry(
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities([BraviaTVRemote(coordinator, unique_id, config_entry.title)])
async_add_entities([BraviaTVRemote(coordinator, unique_id)])
class BraviaTVRemote(BraviaTVEntity, RemoteEntity):

View File

@@ -255,7 +255,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
)
entity_description: ClimateEntityDescription
_attr_current_humidity: int | None = None
_attr_current_humidity: float | None = None
_attr_current_temperature: float | None = None
_attr_fan_mode: str | None
_attr_fan_modes: list[str] | None

View File

@@ -6,12 +6,16 @@ import asyncio
from collections.abc import Callable
from contextlib import suppress
from datetime import datetime, timedelta
from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
import aiohttp
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa import AlexaApiError, Cloud
from hass_nabucasa.alexa_api import (
AlexaAccessTokenDetails,
AlexaApiNeedsRelinkError,
AlexaApiNoTokenError,
)
from yarl import URL
from homeassistant.components import persistent_notification
@@ -146,7 +150,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
self._cloud_user = cloud_user
self._prefs = prefs
self._cloud = cloud
self._token = None
self._token: str | None = None
self._token_valid: datetime | None = None
self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
self._alexa_sync_unsub: Callable[[], None] | None = None
@@ -318,32 +322,31 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
async def async_get_access_token(self) -> str | None:
"""Get an access token."""
details: AlexaAccessTokenDetails | None
if self._token_valid is not None and self._token_valid > utcnow():
return self._token
resp = await cloud_api.async_alexa_access_token(self._cloud)
body = await resp.json()
try:
details = await self._cloud.alexa_api.access_token()
except AlexaApiNeedsRelinkError as exception:
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({exception.reason}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink from exception
except (AlexaApiNoTokenError, AlexaApiError) as exception:
raise alexa_errors.NoTokenAvailable from exception
if resp.status == HTTPStatus.BAD_REQUEST:
if body["reason"] in ("RefreshTokenNotFound", "UnknownRegion"):
if self.should_report_state:
persistent_notification.async_create(
self.hass,
(
"There was an error reporting state to Alexa"
f" ({body['reason']}). Please re-link your Alexa skill via"
" the Alexa app to continue using it."
),
"Alexa state reporting disabled",
"cloud_alexa_report",
)
raise alexa_errors.RequireRelink
raise alexa_errors.NoTokenAvailable
self._token = body["access_token"]
self._endpoint = body["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=body["expires_in"])
self._token = details["access_token"]
self._endpoint = details["event_endpoint"]
self._token_valid = utcnow() + timedelta(seconds=details["expires_in"])
return self._token
async def _async_prefs_updated(self, prefs: CloudPreferences) -> None:

View File

@@ -7,7 +7,7 @@ from http import HTTPStatus
import logging
from typing import TYPE_CHECKING, Any
from hass_nabucasa import Cloud, cloud_api
from hass_nabucasa import Cloud
from hass_nabucasa.google_report_state import ErrorResponse
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
@@ -377,7 +377,7 @@ class CloudGoogleConfig(AbstractConfig):
return HTTPStatus.OK
async with self._sync_entities_lock:
resp = await cloud_api.async_google_actions_request_sync(self._cloud)
resp = await self._cloud.google_report_state.request_sync()
return resp.status
async def async_connect_agent_user(self, agent_user_id: str) -> None:

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==0.111.2"],
"requirements": ["hass-nabucasa==1.0.0"],
"single_config_entry": true
}

View File

@@ -7,22 +7,18 @@ import logging
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.util import Throttle
from .const import (
ACCOUNT_IS_VAULT,
API_ACCOUNT_AMOUNT,
API_ACCOUNT_AVALIABLE,
API_ACCOUNT_BALANCE,
API_ACCOUNT_CURRENCY,
API_ACCOUNT_CURRENCY_CODE,
API_ACCOUNT_HOLD,
API_ACCOUNT_ID,
API_ACCOUNT_NAME,
@@ -31,12 +27,9 @@ from .const import (
API_DATA,
API_RATES_CURRENCY,
API_RESOURCE_TYPE,
API_TYPE_VAULT,
API_V3_ACCOUNT_ID,
API_V3_TYPE_VAULT,
CONF_CURRENCIES,
CONF_EXCHANGE_BASE,
CONF_EXCHANGE_RATES,
)
_LOGGER = logging.getLogger(__name__)
@@ -51,9 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
"""Set up Coinbase from a config entry."""
instance = await hass.async_add_executor_job(create_and_update_instance, entry)
entry.async_on_unload(entry.add_update_listener(update_listener))
entry.runtime_data = instance
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -68,68 +58,28 @@ async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData:
"""Create and update a Coinbase Data instance."""
# Check if user is using deprecated v2 API credentials
if "organizations" not in entry.data[CONF_API_KEY]:
client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
version = "v2"
else:
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
# Trigger reauthentication to ask user for v3 credentials
raise ConfigEntryAuthFailed(
"Your Coinbase API key appears to be for the deprecated v2 API. "
"Please reconfigure with a new API key created for the v3 API. "
"Visit https://www.coinbase.com/developer-platform to create new credentials."
)
version = "v3"
client = RESTClient(
api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
)
base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
instance = CoinbaseData(client, base_rate, version)
instance = CoinbaseData(client, base_rate)
instance.update()
return instance
async def update_listener(
hass: HomeAssistant, config_entry: CoinbaseConfigEntry
) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
registry = er.async_get(hass)
entities = er.async_entries_for_config_entry(registry, config_entry.entry_id)
# Remove orphaned entities
for entity in entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
def get_accounts(client, version):
def get_accounts(client):
"""Handle paginated accounts."""
response = client.get_accounts()
if version == "v2":
accounts = response[API_DATA]
next_starting_after = response.pagination.next_starting_after
while next_starting_after:
response = client.get_accounts(starting_after=next_starting_after)
accounts += response[API_DATA]
next_starting_after = response.pagination.next_starting_after
return [
{
API_ACCOUNT_ID: account[API_ACCOUNT_ID],
API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
API_ACCOUNT_CURRENCY_CODE
],
API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
}
for account in accounts
]
accounts = response[API_ACCOUNTS]
while response["has_next"]:
response = client.get_accounts(cursor=response["cursor"])
@@ -153,37 +103,28 @@ def get_accounts(client, version):
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, client, exchange_base, version):
def __init__(self, client, exchange_base):
"""Init the coinbase data object."""
self.client = client
self.accounts = None
self.exchange_base = exchange_base
self.exchange_rates = None
if version == "v2":
self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
else:
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
self.api_version = version
self.user_id = (
"v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = get_accounts(self.client, self.api_version)
if self.api_version == "v2":
self.exchange_rates = self.client.get_exchange_rates(
currency=self.exchange_base
)
else:
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except (AuthenticationError, HTTPError) as coinbase_error:
self.accounts = get_accounts(self.client)
self.exchange_rates = self.client.get(
"/v2/exchange-rates",
params={API_RATES_CURRENCY: self.exchange_base},
)[API_DATA]
except HTTPError as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)
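The v2-versus-v3 detection in create_and_update_instance above hinges only on the shape of the stored API key. A hedged sketch of that check follows; the key strings are invented, and it is assumed that v3 (Cloud Developer Platform) keys contain an "organizations/" path while legacy v2 keys do not:

def is_v3_key(api_key: str) -> bool:
    # v3 keys reference an organization path; legacy v2 keys do not,
    # which is what triggers the reauth flow introduced above.
    return "organizations" in api_key

assert is_v3_key("organizations/1234abcd/apiKeys/5678efgh")
assert not is_v3_key("legacy-v2-style-key")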

View File

@@ -2,17 +2,20 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from coinbase.rest import RESTClient
from coinbase.rest.rest_base import HTTPError
from coinbase.wallet.client import Client as LegacyClient
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
@@ -45,9 +48,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
def get_user_from_client(api_key, api_token):
"""Get the user name from Coinbase API credentials."""
if "organizations" not in api_key:
client = LegacyClient(api_key, api_token)
return client.get_current_user()["name"]
client = RESTClient(api_key=api_key, api_secret=api_token)
return client.get_portfolios()["portfolios"][0]["name"]
@@ -59,7 +59,7 @@ async def validate_api(hass: HomeAssistant, data):
user = await hass.async_add_executor_job(
get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
)
except (AuthenticationError, HTTPError) as error:
except HTTPError as error:
if "api key" in str(error) or " 401 Client Error" in str(error):
_LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
raise InvalidKey from error
@@ -74,8 +74,8 @@ async def validate_api(hass: HomeAssistant, data):
raise InvalidAuth from error
except ConnectionError as error:
raise CannotConnect from error
api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
return {"title": user, "api_version": api_version}
return {"title": user}
async def validate_options(
@@ -85,20 +85,17 @@ async def validate_options(
client = config_entry.runtime_data.client
accounts = await hass.async_add_executor_job(
get_accounts, client, config_entry.data.get("api_version", "v2")
)
accounts = await hass.async_add_executor_job(get_accounts, client)
accounts_currencies = [
account[API_ACCOUNT_CURRENCY]
for account in accounts
if not account[ACCOUNT_IS_VAULT]
]
if config_entry.data.get("api_version", "v2") == "v2":
available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
else:
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
available_rates = resp[API_DATA]
if CONF_CURRENCIES in options:
for currency in options[CONF_CURRENCIES]:
if currency not in accounts_currencies:
@@ -117,6 +114,8 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
reauth_entry: CoinbaseConfigEntry
async def async_step_user(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
@@ -143,12 +142,63 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
user_input[CONF_API_VERSION] = info["api_version"]
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication flow."""
self.reauth_entry = self._get_reauth_entry()
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Handle reauthentication confirmation."""
errors: dict[str, str] = {}
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
try:
await validate_api(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidKey:
errors["base"] = "invalid_auth_key"
except InvalidSecret:
errors["base"] = "invalid_auth_secret"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
self.reauth_entry,
data_updates=user_input,
reason="reauth_successful",
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_USER_DATA_SCHEMA,
description_placeholders={
"account_name": self.reauth_entry.title,
},
errors=errors,
)
@staticmethod
@callback
def async_get_options_flow(
@@ -158,7 +208,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
return OptionsFlowHandler()
class OptionsFlowHandler(OptionsFlow):
class OptionsFlowHandler(OptionsFlowWithReload):
"""Handle a option flow for Coinbase."""
async def async_step_init(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/coinbase",
"iot_class": "cloud_polling",
"loggers": ["coinbase"],
"requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
"requirements": ["coinbase-advanced-py==1.2.2"]
}

View File

@@ -6,6 +6,7 @@ import logging
from homeassistant.components.sensor import SensorEntity, SensorStateClass
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -27,7 +28,6 @@ from .const import (
_LOGGER = logging.getLogger(__name__)
ATTR_NATIVE_BALANCE = "Balance in native currency"
ATTR_API_VERSION = "API Version"
CURRENCY_ICONS = {
"BTC": "mdi:currency-btc",
@@ -69,11 +69,26 @@ async def async_setup_entry(
CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT
)
# Remove orphaned entities
registry = er.async_get(hass)
existing_entities = er.async_entries_for_config_entry(
registry, config_entry.entry_id
)
for entity in existing_entities:
currency = entity.unique_id.split("-")[-1]
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)
for currency in desired_currencies:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
"Attempting to set up %s account sensor",
currency,
instance.api_version,
)
if currency not in provided_currencies:
_LOGGER.warning(
@@ -89,9 +104,8 @@ async def async_setup_entry(
if CONF_EXCHANGE_RATES in config_entry.options:
for rate in config_entry.options[CONF_EXCHANGE_RATES]:
_LOGGER.debug(
"Attempting to set up %s account sensor with %s API",
"Attempting to set up %s exchange rate sensor",
rate,
instance.api_version,
)
entities.append(
ExchangeRateSensor(
@@ -146,15 +160,13 @@ class AccountSensor(SensorEntity):
"""Return the state attributes of the sensor."""
return {
ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
ATTR_API_VERSION: self._coinbase_data.api_version,
}
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s account sensor with %s API",
"Updating %s account sensor",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
for account in self._coinbase_data.accounts:
@@ -210,9 +222,8 @@ class ExchangeRateSensor(SensorEntity):
def update(self) -> None:
"""Get the latest state of the sensor."""
_LOGGER.debug(
"Updating %s rate sensor with %s API",
"Updating %s rate sensor",
self._currency,
self._coinbase_data.api_version,
)
self._coinbase_data.update()
self._attr_native_value = round(

View File

@@ -8,6 +8,14 @@
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
},
"reauth_confirm": {
"title": "Update Coinbase API credentials",
"description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"api_token": "API secret"
}
}
},
"error": {
@@ -18,7 +26,8 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "Successfully updated credentials"
}
},
"options": {

View File

@@ -40,6 +40,7 @@ from .chat_log import (
ConverseError,
SystemContent,
ToolResultContent,
ToolResultContentDeltaDict,
UserContent,
async_get_chat_log,
)
@@ -79,6 +80,7 @@ __all__ = [
"ConverseError",
"SystemContent",
"ToolResultContent",
"ToolResultContentDeltaDict",
"UserContent",
"async_conversation_trace_append",
"async_converse",
@@ -117,7 +119,7 @@ CONFIG_SCHEMA = vol.Schema(
{cv.string: vol.All(cv.ensure_list, [cv.string])}
)
}
)
),
},
extra=vol.ALLOW_EXTRA,
)
@@ -268,8 +270,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config.get(DOMAIN, {}).get("intents", {})
hass, entity_component, config_intents=agent_config.get("intents", {})
)
async def handle_process(service: ServiceCall) -> ServiceResponse:

View File

@@ -9,7 +9,7 @@ from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict
from typing import Any, Literal, TypedDict, cast
import voluptuous as vol
@@ -190,6 +190,15 @@ class AssistantContentDeltaDict(TypedDict, total=False):
native: Any
class ToolResultContentDeltaDict(TypedDict, total=False):
"""Tool result content."""
role: Literal["tool_result"]
tool_call_id: str
tool_name: str
tool_result: JsonObjectType
@dataclass
class ChatLog:
"""Class holding the chat history of a specific conversation."""
@@ -235,17 +244,25 @@ class ChatLog:
@callback
def async_add_assistant_content_without_tools(
self, content: AssistantContent
self, content: AssistantContent | ToolResultContent
) -> None:
"""Add assistant content to the log."""
"""Add assistant content to the log.
Allows assistant content without tool calls or with external tool calls only,
as well as tool results for those external tool calls.
"""
LOGGER.debug("Adding assistant content: %s", content)
if content.tool_calls is not None:
raise ValueError("Tool calls not allowed")
if (
isinstance(content, AssistantContent)
and content.tool_calls is not None
and any(not tool_call.external for tool_call in content.tool_calls)
):
raise ValueError("Non-external tool calls not allowed")
self.content.append(content)
async def async_add_assistant_content(
self,
content: AssistantContent,
content: AssistantContent | ToolResultContent,
/,
tool_call_tasks: dict[str, asyncio.Task] | None = None,
) -> AsyncGenerator[ToolResultContent]:
@@ -258,7 +275,11 @@ class ChatLog:
LOGGER.debug("Adding assistant content: %s", content)
self.content.append(content)
if content.tool_calls is None:
if (
not isinstance(content, AssistantContent)
or content.tool_calls is None
or all(tool_call.external for tool_call in content.tool_calls)
):
return
if self.llm_api is None:
@@ -267,13 +288,16 @@ class ChatLog:
if tool_call_tasks is None:
tool_call_tasks = {}
for tool_input in content.tool_calls:
if tool_input.id not in tool_call_tasks:
if tool_input.id not in tool_call_tasks and not tool_input.external:
tool_call_tasks[tool_input.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_input),
name=f"llm_tool_{tool_input.id}",
)
for tool_input in content.tool_calls:
if tool_input.external:
continue
LOGGER.debug(
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
)
@@ -296,7 +320,9 @@ class ChatLog:
yield response_content
async def async_add_delta_content_stream(
self, agent_id: str, stream: AsyncIterable[AssistantContentDeltaDict]
self,
agent_id: str,
stream: AsyncIterable[AssistantContentDeltaDict | ToolResultContentDeltaDict],
) -> AsyncGenerator[AssistantContent | ToolResultContent]:
"""Stream content into the chat log.
@@ -320,30 +346,34 @@ class ChatLog:
# Indicates update to current message
if "role" not in delta:
if delta_content := delta.get("content"):
# ToolResultContentDeltaDict will always have a role
assistant_delta = cast(AssistantContentDeltaDict, delta)
if delta_content := assistant_delta.get("content"):
current_content += delta_content
if delta_thinking_content := delta.get("thinking_content"):
if delta_thinking_content := assistant_delta.get("thinking_content"):
current_thinking_content += delta_thinking_content
if delta_native := delta.get("native"):
if delta_native := assistant_delta.get("native"):
if current_native is not None:
raise RuntimeError(
"Native content already set, cannot overwrite"
)
current_native = delta_native
if delta_tool_calls := delta.get("tool_calls"):
if self.llm_api is None:
raise ValueError("No LLM API configured")
if delta_tool_calls := assistant_delta.get("tool_calls"):
current_tool_calls += delta_tool_calls
# Start processing the tool calls as soon as we know about them
for tool_call in delta_tool_calls:
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
if not tool_call.external:
if self.llm_api is None:
raise ValueError("No LLM API configured")
tool_call_tasks[tool_call.id] = self.hass.async_create_task(
self.llm_api.async_call_tool(tool_call),
name=f"llm_tool_{tool_call.id}",
)
if self.delta_listener:
if filtered_delta := {
k: v for k, v in delta.items() if k != "native"
k: v for k, v in assistant_delta.items() if k != "native"
}:
# We do not want to send the native content to the listener
# as it is not JSON serializable
@@ -351,10 +381,6 @@ class ChatLog:
continue
# Starting a new message
if delta["role"] != "assistant":
raise ValueError(f"Only assistant role expected. Got {delta['role']}")
# Yield the previous message if it has content
if (
current_content
@@ -362,7 +388,7 @@ class ChatLog:
or current_tool_calls
or current_native
):
content = AssistantContent(
content: AssistantContent | ToolResultContent = AssistantContent(
agent_id=agent_id,
content=current_content or None,
thinking_content=current_thinking_content or None,
@@ -376,14 +402,38 @@ class ChatLog:
yield tool_result
if self.delta_listener:
self.delta_listener(self, asdict(tool_result))
current_content = ""
current_thinking_content = ""
current_native = None
current_tool_calls = []
current_content = delta.get("content") or ""
current_thinking_content = delta.get("thinking_content") or ""
current_tool_calls = delta.get("tool_calls") or []
current_native = delta.get("native")
if delta["role"] == "assistant":
current_content = delta.get("content") or ""
current_thinking_content = delta.get("thinking_content") or ""
current_tool_calls = delta.get("tool_calls") or []
current_native = delta.get("native")
if self.delta_listener:
self.delta_listener(self, delta) # type: ignore[arg-type]
if self.delta_listener:
if filtered_delta := {
k: v for k, v in delta.items() if k != "native"
}:
self.delta_listener(self, filtered_delta)
elif delta["role"] == "tool_result":
content = ToolResultContent(
agent_id=agent_id,
tool_call_id=delta["tool_call_id"],
tool_name=delta["tool_name"],
tool_result=delta["tool_result"],
)
yield content
if self.delta_listener:
self.delta_listener(self, asdict(content))
self.async_add_assistant_content_without_tools(content)
else:
raise ValueError(
"Only assistant and tool_result roles expected."
f" Got {delta['role']}"
)
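For reference, a minimal sketch (not part of the diff) of a delta an agent could emit for an externally executed tool, matching the ToolResultContentDeltaDict shape introduced above; the tool name and result payload are invented for illustration:

from homeassistant.components.conversation import ToolResultContentDeltaDict

delta: ToolResultContentDeltaDict = {
    "role": "tool_result",
    "tool_call_id": "call-1",            # id of the external tool call it answers
    "tool_name": "get_weather",          # hypothetical external tool
    "tool_result": {"temperature": 21.5},
}
# Yielded through async_add_delta_content_stream, such a delta becomes a
# ToolResultContent entry and is appended via
# async_add_assistant_content_without_tools, as shown in the branch above.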
if (
current_content

View File

@@ -14,14 +14,19 @@ import re
import time
from typing import IO, Any, cast
from hassil.expression import Expression, ListReference, Sequence, TextChunk
from hassil.expression import Expression, Group, ListReference, TextChunk
from hassil.fuzzy import FuzzyNgramMatcher, SlotCombinationInfo
from hassil.intents import (
Intent,
IntentData,
Intents,
SlotList,
TextSlotList,
TextSlotValue,
WildcardSlotList,
)
from hassil.models import MatchEntity
from hassil.ngram import Sqlite3NgramModel
from hassil.recognize import (
MISSING_ENTITY,
RecognizeResult,
@@ -31,7 +36,15 @@ from hassil.recognize import (
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.trie import Trie
from hassil.util import merge_dict
from home_assistant_intents import ErrorKey, get_intents, get_languages
from home_assistant_intents import (
ErrorKey,
FuzzyConfig,
FuzzyLanguageResponses,
get_fuzzy_config,
get_fuzzy_language,
get_intents,
get_languages,
)
import yaml
from homeassistant import core
@@ -76,6 +89,7 @@ TRIGGER_CALLBACK_TYPE = Callable[
]
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -94,6 +108,8 @@ class LanguageIntents:
intent_responses: dict[str, Any]
error_responses: dict[str, Any]
language_variant: str | None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
@dataclass(slots=True)
@@ -119,10 +135,13 @@ class IntentMatchingStage(Enum):
EXPOSED_ENTITIES_ONLY = auto()
"""Match against exposed entities only."""
FUZZY = auto()
"""Use fuzzy matching to guess intent."""
UNEXPOSED_ENTITIES = auto()
"""Match against unexposed entities in Home Assistant."""
FUZZY = auto()
UNKNOWN_NAMES = auto()
"""Capture names that are not known to Home Assistant."""
@@ -241,6 +260,10 @@ class DefaultAgent(ConversationEntity):
# LRU cache to avoid unnecessary intent matching
self._intent_cache = IntentCache(capacity=128)
# Shared configuration for fuzzy matching
self.fuzzy_matching = True
self._fuzzy_config: FuzzyConfig | None = None
@property
def supported_languages(self) -> list[str]:
"""Return a list of supported languages."""
@@ -299,7 +322,7 @@ class DefaultAgent(ConversationEntity):
_LOGGER.warning("No intents were loaded for language: %s", language)
return None
slot_lists = self._make_slot_lists()
slot_lists = await self._make_slot_lists()
intent_context = self._make_intent_context(user_input)
if self._exposed_names_trie is not None:
@@ -556,6 +579,36 @@ class DefaultAgent(ConversationEntity):
# Don't try matching against all entities or doing a fuzzy match
return None
# Use fuzzy matching
skip_fuzzy_match = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.FUZZY
):
_LOGGER.debug("Got cached result for fuzzy match")
return cache_value.result
# Continue with matching, but we know we won't succeed for fuzzy
# match.
skip_fuzzy_match = True
if (not skip_fuzzy_match) and self.fuzzy_matching:
start_time = time.monotonic()
fuzzy_result = self._recognize_fuzzy(lang_intents, user_input)
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(result=fuzzy_result, stage=IntentMatchingStage.FUZZY),
)
_LOGGER.debug(
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
)
if fuzzy_result is not None:
return fuzzy_result
# Try again with all entities (including unexposed)
skip_unexposed_entities_match = False
if cache_value is not None:
@@ -601,102 +654,160 @@ class DefaultAgent(ConversationEntity):
# This should fail the intent handling phase (async_match_targets).
return strict_result
# Try again with missing entities enabled
skip_fuzzy_match = False
# Check unknown names
skip_unknown_names = False
if cache_value is not None:
if (cache_value.result is not None) and (
cache_value.stage == IntentMatchingStage.FUZZY
cache_value.stage == IntentMatchingStage.UNKNOWN_NAMES
):
_LOGGER.debug("Got cached result for fuzzy match")
_LOGGER.debug("Got cached result for unknown names")
return cache_value.result
# We know we won't succeed for fuzzy matching.
skip_fuzzy_match = True
skip_unknown_names = True
maybe_result: RecognizeResult | None = None
if not skip_fuzzy_match:
if not skip_unknown_names:
start_time = time.monotonic()
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
maybe_result = self._recognize_unknown_names(
lang_intents, user_input, slot_lists, intent_context
)
# Update cache
self._intent_cache.put(
cache_key,
IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY),
IntentCacheValue(
result=maybe_result, stage=IntentMatchingStage.UNKNOWN_NAMES
),
)
_LOGGER.debug(
"Did fuzzy match in %s second(s)", time.monotonic() - start_time
"Did unknown names match in %s second(s)", time.monotonic() - start_time
)
return maybe_result
def _recognize_fuzzy(
self, lang_intents: LanguageIntents, user_input: ConversationInput
) -> RecognizeResult | None:
"""Return fuzzy recognition from hassil."""
if lang_intents.fuzzy_matcher is None:
return None
fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
if fuzzy_result is None:
return None
response = "default"
if lang_intents.fuzzy_responses:
domain = "" # no domain
if "name" in fuzzy_result.slots:
domain = fuzzy_result.name_domain
elif "domain" in fuzzy_result.slots:
domain = fuzzy_result.slots["domain"].value
slot_combo = tuple(sorted(fuzzy_result.slots))
if (
intent_responses := lang_intents.fuzzy_responses.get(
fuzzy_result.intent_name
)
) and (combo_responses := intent_responses.get(slot_combo)):
response = combo_responses.get(domain, response)
entities = [
MatchEntity(name=slot_name, value=slot_value.value, text=slot_value.text)
for slot_name, slot_value in fuzzy_result.slots.items()
]
return RecognizeResult(
intent=Intent(name=fuzzy_result.intent_name),
intent_data=IntentData(sentence_texts=[]),
intent_metadata={METADATA_FUZZY_MATCH: True},
entities={entity.name: entity for entity in entities},
entities_list=entities,
response=response,
)
def _recognize_unknown_names(
self,
lang_intents: LanguageIntents,
user_input: ConversationInput,
slot_lists: dict[str, SlotList],
intent_context: dict[str, Any] | None,
) -> RecognizeResult | None:
"""Return result with unknown names for an error message."""
maybe_result: RecognizeResult | None = None
best_num_matched_entities = 0
best_num_unmatched_entities = 0
best_num_unmatched_ranges = 0
for result in recognize_all(
user_input.text,
lang_intents.intents,
slot_lists=slot_lists,
intent_context=intent_context,
allow_unmatched_entities=True,
):
if result.text_chunks_matched < 1:
# Skip results that don't match any literal text
continue
# Don't count missing entities that couldn't be filled from context
num_matched_entities = 0
for matched_entity in result.entities_list:
if matched_entity.name not in result.unmatched_entities:
num_matched_entities += 1
num_unmatched_entities = 0
num_unmatched_ranges = 0
for unmatched_entity in result.unmatched_entities_list:
if isinstance(unmatched_entity, UnmatchedTextEntity):
if unmatched_entity.text != MISSING_ENTITY:
num_unmatched_entities += 1
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
num_unmatched_ranges += 1
num_unmatched_entities += 1
else:
num_unmatched_entities += 1
if (
(maybe_result is None) # first result
or (
# More literal text matched
result.text_chunks_matched > maybe_result.text_chunks_matched
)
or (
# More entities matched
num_matched_entities > best_num_matched_entities
)
or (
# Fewer unmatched entities
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities < best_num_unmatched_entities)
)
or (
# Prefer unmatched ranges
(num_matched_entities == best_num_matched_entities)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges > best_num_unmatched_ranges)
)
or (
# Prefer match failures with entities
(result.text_chunks_matched == maybe_result.text_chunks_matched)
and (num_unmatched_entities == best_num_unmatched_entities)
and (num_unmatched_ranges == best_num_unmatched_ranges)
and (
("name" in result.entities)
or ("name" in result.unmatched_entities)
)
)
):
maybe_result = result
best_num_matched_entities = num_matched_entities
best_num_unmatched_entities = num_unmatched_entities
best_num_unmatched_ranges = num_unmatched_ranges
return maybe_result
def _get_unexposed_entity_names(self, text: str) -> TextSlotList:
"""Get filtered slot list with unexposed entity names in Home Assistant."""
if self._unexposed_names_trie is None:
@@ -851,7 +962,7 @@ class DefaultAgent(ConversationEntity):
if lang_intents is None:
return
self._make_slot_lists()
await self._make_slot_lists()
async def async_get_or_load_intents(self, language: str) -> LanguageIntents | None:
"""Load all intents of a language with lock."""
@@ -1002,12 +1113,85 @@ class DefaultAgent(ConversationEntity):
intent_responses = responses_dict.get("intents", {})
error_responses = responses_dict.get("errors", {})
if not self.fuzzy_matching:
_LOGGER.debug("Fuzzy matching is disabled")
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
# Load fuzzy
fuzzy_info = get_fuzzy_language(language_variant, json_load=json_load)
if fuzzy_info is None:
_LOGGER.debug(
"Fuzzy matching not available for language: %s", language_variant
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
)
if self._fuzzy_config is None:
# Load shared config
self._fuzzy_config = get_fuzzy_config(json_load=json_load)
_LOGGER.debug("Loaded shared fuzzy matching config")
assert self._fuzzy_config is not None
fuzzy_matcher: FuzzyNgramMatcher | None = None
fuzzy_responses: FuzzyLanguageResponses | None = None
start_time = time.monotonic()
fuzzy_responses = fuzzy_info.responses
fuzzy_matcher = FuzzyNgramMatcher(
intents=intents,
intent_models={
intent_name: Sqlite3NgramModel(
order=fuzzy_model.order,
words={
word: str(word_id)
for word, word_id in fuzzy_model.words.items()
},
database_path=fuzzy_model.database_path,
)
for intent_name, fuzzy_model in fuzzy_info.ngram_models.items()
},
intent_slot_list_names=self._fuzzy_config.slot_list_names,
slot_combinations={
intent_name: {
combo_key: [
SlotCombinationInfo(
name_domains=(set(name_domains) if name_domains else None)
)
]
for combo_key, name_domains in intent_combos.items()
}
for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
},
domain_keywords=fuzzy_info.domain_keywords,
stop_words=fuzzy_info.stop_words,
)
_LOGGER.debug(
"Loaded fuzzy matcher in %s second(s): language=%s, intents=%s",
time.monotonic() - start_time,
language_variant,
sorted(fuzzy_matcher.intent_models.keys()),
)
return LanguageIntents(
intents,
intents_dict,
intent_responses,
error_responses,
language_variant,
fuzzy_matcher=fuzzy_matcher,
fuzzy_responses=fuzzy_responses,
)
@core.callback
@@ -1027,8 +1211,7 @@ class DefaultAgent(ConversationEntity):
# Slot lists have changed, so we must clear the cache
self._intent_cache.clear()
@core.callback
def _make_slot_lists(self) -> dict[str, SlotList]:
async def _make_slot_lists(self) -> dict[str, SlotList]:
"""Create slot lists with areas and entity names/aliases."""
if self._slot_lists is not None:
return self._slot_lists
@@ -1089,6 +1272,10 @@ class DefaultAgent(ConversationEntity):
"floor": TextSlotList.from_tuples(floor_names, allow_template=False),
}
# Reload fuzzy matchers with new slot lists
if self.fuzzy_matching:
await self.hass.async_add_executor_job(self._load_fuzzy_matchers)
self._listen_clear_slot_list()
_LOGGER.debug(
@@ -1098,6 +1285,25 @@ class DefaultAgent(ConversationEntity):
return self._slot_lists
def _load_fuzzy_matchers(self) -> None:
"""Reload fuzzy matchers for all loaded languages."""
for lang_intents in self._lang_intents.values():
if (not isinstance(lang_intents, LanguageIntents)) or (
lang_intents.fuzzy_matcher is None
):
continue
lang_matcher = lang_intents.fuzzy_matcher
lang_intents.fuzzy_matcher = FuzzyNgramMatcher(
intents=lang_matcher.intents,
intent_models=lang_matcher.intent_models,
intent_slot_list_names=lang_matcher.intent_slot_list_names,
slot_combinations=lang_matcher.slot_combinations,
domain_keywords=lang_matcher.domain_keywords,
stop_words=lang_matcher.stop_words,
slot_lists=self._slot_lists,
)
def _make_intent_context(
self, user_input: ConversationInput
) -> dict[str, Any] | None:
@@ -1183,7 +1389,7 @@ class DefaultAgent(ConversationEntity):
for trigger_intent in trigger_intents.intents.values():
for intent_data in trigger_intent.data:
for sentence in intent_data.sentences:
_collect_list_references(sentence, wildcard_names)
_collect_list_references(sentence.expression, wildcard_names)
for wildcard_name in wildcard_names:
trigger_intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -1520,11 +1726,9 @@ def _get_match_error_response(
def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
"""Collect list reference names recursively."""
if isinstance(expression, Sequence):
seq: Sequence = expression
for item in seq.items:
if isinstance(expression, Group):
for item in expression.items:
_collect_list_references(item, list_names)
elif isinstance(expression, ListReference):
# {list}
list_ref: ListReference = expression
list_names.add(list_ref.slot_name)
list_names.add(expression.slot_name)

View File

@@ -26,7 +26,11 @@ from .agent_manager import (
get_agent_manager,
)
from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE
from .default_agent import (
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
METADATA_FUZZY_MATCH,
)
from .entity import ConversationEntity
from .models import ConversationInput
@@ -240,6 +244,8 @@ async def websocket_hass_agent_debug(
"sentence_template": "",
# When match is incomplete, this will contain the best slot guesses
"unmatched_slots": _get_unmatched_slots(intent_result),
# True if match was not exact
"fuzzy_match": False,
}
if successful_match:
@@ -251,16 +257,19 @@ async def websocket_hass_agent_debug(
if intent_result.intent_sentence is not None:
result_dict["sentence_template"] = intent_result.intent_sentence.text
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata and intent_result.intent_metadata.get(
METADATA_CUSTOM_SENTENCE
):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
if intent_result.intent_metadata:
# Inspect metadata to determine if this matched a custom sentence
if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
result_dict["source"] = "custom"
result_dict["file"] = intent_result.intent_metadata.get(
METADATA_CUSTOM_FILE
)
else:
result_dict["source"] = "builtin"
result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
METADATA_FUZZY_MATCH, False
)
else:
result_dict["source"] = "builtin"
result_dicts.append(result_dict)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.7.30"]
"requirements": ["hassil==3.1.0", "home-assistant-intents==2025.7.30"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["cookidoo_api"],
"quality_scale": "silver",
"requirements": ["cookidoo-api==0.12.2"]
"requirements": ["cookidoo-api==0.14.0"]
}

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from datetime import datetime
from typing import Any
from homeassistant.components import media_source
from homeassistant.components.media_player import (
BrowseMedia,
MediaClass,
@@ -396,6 +397,15 @@ class DemoBrowsePlayer(AbstractDemoPlayer):
_attr_supported_features = BROWSE_PLAYER_SUPPORT
async def async_browse_media(
self,
media_content_type: MediaType | str | None = None,
media_content_id: str | None = None,
) -> BrowseMedia:
"""Implement the websocket media browsing helper."""
return await media_source.async_browse_media(self.hass, media_content_id)
class DemoGroupPlayer(AbstractDemoPlayer):
"""A Demo media player that supports grouping."""

View File

@@ -15,7 +15,7 @@
],
"quality_scale": "internal",
"requirements": [
"aiodhcpwatcher==1.2.0",
"aiodhcpwatcher==1.2.1",
"aiodiscover==2.7.1",
"cached-ipaddress==0.10.0"
]

View File

@@ -30,6 +30,7 @@ class Dremel3DPrinterEntity(CoordinatorEntity[Dremel3DPrinterDataUpdateCoordinat
"""Return device information about this Dremel printer."""
return DeviceInfo(
identifiers={(DOMAIN, self._api.get_serial_number())},
serial_number=self._api.get_serial_number(),
manufacturer=self._api.get_manufacturer(),
model=self._api.get_model(),
name=self._api.get_title(),

View File

@@ -93,6 +93,7 @@ class EmonitorPowerSensor(CoordinatorEntity[EmonitorStatus], SensorEntity):
manufacturer="Powerhouse Dynamics, Inc.",
name=device_name,
sw_version=emonitor_status.hardware.firmware_version,
serial_number=emonitor_status.hardware.serial_number,
)
self._attr_extra_state_attributes = {"channel": channel_number}
self._attr_native_value = self._paired_attr(self.entity_description.key)

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from pyenphase import Envoy
from homeassistant.const import CONF_HOST
@@ -42,6 +44,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: EnphaseConfigEntry) -> b
},
)
# register envoy before via_device is used
device_registry = dr.async_get(hass)
if TYPE_CHECKING:
assert envoy.serial_number
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, envoy.serial_number)},
manufacturer="Enphase",
name=coordinator.name,
model=envoy.envoy_model,
sw_version=str(envoy.firmware),
hw_version=envoy.part_number,
serial_number=envoy.serial_number,
)
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
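The explicit async_get_or_create call above exists so the Envoy device is in the registry before any child entity references it through via_device. A minimal sketch of such a child DeviceInfo, mirroring the pattern used later in this diff (the child serial number is illustrative):

    from homeassistant.helpers.device_registry import DeviceInfo

    # Hypothetical child device: via_device must point at an identifier that is
    # already registered, here the Envoy registered in async_setup_entry above.
    child_info = DeviceInfo(
        identifiers={(DOMAIN, "123456789012")},     # child serial (illustrative)
        manufacturer="Enphase",
        via_device=(DOMAIN, envoy.serial_number),   # parent Envoy serial
    )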

View File

@@ -6,7 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from operator import attrgetter
from pyenphase import EnvoyEncharge, EnvoyEnpower
from pyenphase import EnvoyC6CC, EnvoyCollar, EnvoyEncharge, EnvoyEnpower
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -72,6 +72,42 @@ ENPOWER_SENSORS = (
)
@dataclass(frozen=True, kw_only=True)
class EnvoyCollarBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes an Envoy IQ Meter Collar binary sensor entity."""
value_fn: Callable[[EnvoyCollar], bool]
COLLAR_SENSORS = (
EnvoyCollarBinarySensorEntityDescription(
key="communicating",
translation_key="communicating",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=attrgetter("communicating"),
),
)
@dataclass(frozen=True, kw_only=True)
class EnvoyC6CCBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes an C6 Combiner controller binary sensor entity."""
value_fn: Callable[[EnvoyC6CC], bool]
C6CC_SENSORS = (
EnvoyC6CCBinarySensorEntityDescription(
key="communicating",
translation_key="communicating",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=attrgetter("communicating"),
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: EnphaseConfigEntry,
@@ -95,6 +131,18 @@ async def async_setup_entry(
for description in ENPOWER_SENSORS
)
if envoy_data.collar:
entities.extend(
EnvoyCollarBinarySensorEntity(coordinator, description)
for description in COLLAR_SENSORS
)
if envoy_data.c6cc:
entities.extend(
EnvoyC6CCBinarySensorEntity(coordinator, description)
for description in C6CC_SENSORS
)
async_add_entities(entities)
@@ -168,3 +216,69 @@ class EnvoyEnpowerBinarySensorEntity(EnvoyBaseBinarySensorEntity):
enpower = self.data.enpower
assert enpower is not None
return self.entity_description.value_fn(enpower)
class EnvoyCollarBinarySensorEntity(EnvoyBaseBinarySensorEntity):
"""Defines an IQ Meter Collar binary_sensor entity."""
entity_description: EnvoyCollarBinarySensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyCollarBinarySensorEntityDescription,
) -> None:
"""Init the Collar base entity."""
super().__init__(coordinator, description)
collar_data = self.data.collar
assert collar_data is not None
self._attr_unique_id = f"{collar_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, collar_data.serial_number)},
manufacturer="Enphase",
model="IQ Meter Collar",
name=f"Collar {collar_data.serial_number}",
sw_version=str(collar_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=collar_data.serial_number,
)
@property
def is_on(self) -> bool:
"""Return the state of the Collar binary_sensor."""
collar_data = self.data.collar
assert collar_data is not None
return self.entity_description.value_fn(collar_data)
class EnvoyC6CCBinarySensorEntity(EnvoyBaseBinarySensorEntity):
"""Defines an C6 Combiner binary_sensor entity."""
entity_description: EnvoyC6CCBinarySensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyC6CCBinarySensorEntityDescription,
) -> None:
"""Init the C6 Combiner base entity."""
super().__init__(coordinator, description)
c6cc_data = self.data.c6cc
assert c6cc_data is not None
self._attr_unique_id = f"{c6cc_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, c6cc_data.serial_number)},
manufacturer="Enphase",
model="C6 COMBINER CONTROLLER",
name=f"C6 Combiner {c6cc_data.serial_number}",
sw_version=str(c6cc_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=c6cc_data.serial_number,
)
@property
def is_on(self) -> bool:
"""Return the state of the C6 Combiner binary_sensor."""
c6cc_data = self.data.c6cc
assert c6cc_data is not None
return self.entity_description.value_fn(c6cc_data)

View File

@@ -1,13 +1,13 @@
{
"domain": "enphase_envoy",
"name": "Enphase Envoy",
"codeowners": ["@bdraco", "@cgarwood", "@joostlek", "@catsmanac"],
"codeowners": ["@bdraco", "@cgarwood", "@catsmanac"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/enphase_envoy",
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.2.3"],
"requirements": ["pyenphase==2.3.0"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@@ -12,6 +12,8 @@ from typing import TYPE_CHECKING
from pyenphase import (
EnvoyACBPower,
EnvoyBatteryAggregate,
EnvoyC6CC,
EnvoyCollar,
EnvoyEncharge,
EnvoyEnchargeAggregate,
EnvoyEnchargePower,
@@ -790,6 +792,58 @@ ENPOWER_SENSORS = (
)
@dataclass(frozen=True, kw_only=True)
class EnvoyCollarSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy Collar sensor entity."""
value_fn: Callable[[EnvoyCollar], datetime.datetime | int | float | str]
COLLAR_SENSORS = (
EnvoyCollarSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
value_fn=attrgetter("temperature"),
),
EnvoyCollarSensorEntityDescription(
key=LAST_REPORTED_KEY,
translation_key=LAST_REPORTED_KEY,
native_unit_of_measurement=None,
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda collar: dt_util.utc_from_timestamp(collar.last_report_date),
),
EnvoyCollarSensorEntityDescription(
key="grid_state",
translation_key="grid_status",
value_fn=lambda collar: collar.grid_state,
),
EnvoyCollarSensorEntityDescription(
key="mid_state",
translation_key="mid_state",
value_fn=lambda collar: collar.mid_state,
),
)
@dataclass(frozen=True, kw_only=True)
class EnvoyC6CCSensorEntityDescription(SensorEntityDescription):
"""Describes an Envoy C6 Combiner controller sensor entity."""
value_fn: Callable[[EnvoyC6CC], datetime.datetime]
C6CC_SENSORS = (
EnvoyC6CCSensorEntityDescription(
key=LAST_REPORTED_KEY,
translation_key=LAST_REPORTED_KEY,
native_unit_of_measurement=None,
device_class=SensorDeviceClass.TIMESTAMP,
value_fn=lambda c6cc: dt_util.utc_from_timestamp(c6cc.last_report_date),
),
)
@dataclass(frozen=True)
class EnvoyEnchargeAggregateRequiredKeysMixin:
"""Mixin for required keys."""
@@ -1050,6 +1104,15 @@ async def async_setup_entry(
AggregateBatteryEntity(coordinator, description)
for description in AGGREGATE_BATTERY_SENSORS
)
if envoy_data.collar:
entities.extend(
EnvoyCollarEntity(coordinator, description)
for description in COLLAR_SENSORS
)
if envoy_data.c6cc:
entities.extend(
EnvoyC6CCEntity(coordinator, description) for description in C6CC_SENSORS
)
async_add_entities(entities)
@@ -1488,3 +1551,70 @@ class AggregateBatteryEntity(EnvoySystemSensorEntity):
battery_aggregate = self.data.battery_aggregate
assert battery_aggregate is not None
return self.entity_description.value_fn(battery_aggregate)
class EnvoyCollarEntity(EnvoySensorBaseEntity):
"""Envoy Collar sensor entity."""
entity_description: EnvoyCollarSensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyCollarSensorEntityDescription,
) -> None:
"""Initialize Collar entity."""
super().__init__(coordinator, description)
collar_data = self.data.collar
assert collar_data is not None
self._serial_number = collar_data.serial_number
self._attr_unique_id = f"{collar_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, collar_data.serial_number)},
manufacturer="Enphase",
model="IQ Meter Collar",
name=f"Collar {collar_data.serial_number}",
sw_version=str(collar_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=collar_data.serial_number,
)
@property
def native_value(self) -> datetime.datetime | int | float | str:
"""Return the state of the collar sensors."""
collar_data = self.data.collar
assert collar_data is not None
return self.entity_description.value_fn(collar_data)
class EnvoyC6CCEntity(EnvoySensorBaseEntity):
"""Envoy C6CC sensor entity."""
entity_description: EnvoyC6CCSensorEntityDescription
def __init__(
self,
coordinator: EnphaseUpdateCoordinator,
description: EnvoyC6CCSensorEntityDescription,
) -> None:
"""Initialize Encharge entity."""
super().__init__(coordinator, description)
c6cc_data = self.data.c6cc
assert c6cc_data is not None
self._attr_unique_id = f"{c6cc_data.serial_number}_{description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, c6cc_data.serial_number)},
manufacturer="Enphase",
model="C6 COMBINER CONTROLLER",
name=f"C6 Combiner {c6cc_data.serial_number}",
sw_version=str(c6cc_data.firmware_version),
via_device=(DOMAIN, self.envoy_serial_num),
serial_number=c6cc_data.serial_number,
)
@property
def native_value(self) -> datetime.datetime:
"""Return the state of the c6cc inventory sensors."""
c6cc_data = self.data.c6cc
assert c6cc_data is not None
return self.entity_description.value_fn(c6cc_data)

View File

@@ -407,6 +407,12 @@
},
"last_report_duration": {
"name": "Last report duration"
},
"grid_status": {
"name": "[%key:component::enphase_envoy::entity::binary_sensor::grid_status::name%]"
},
"mid_state": {
"name": "MID state"
}
},
"switch": {

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
import base64
from functools import partial
import logging
@@ -15,7 +14,6 @@ from aioesphomeapi import (
APIVersion,
DeviceInfo as EsphomeDeviceInfo,
EncryptionPlaintextAPIError,
EntityInfo,
HomeassistantServiceCall,
InvalidAuthAPIError,
InvalidEncryptionKeyAPIError,
@@ -63,7 +61,6 @@ from homeassistant.helpers.issue_registry import (
)
from homeassistant.helpers.service import async_set_service_schema
from homeassistant.helpers.template import Template
from homeassistant.util.async_ import create_eager_task
from .bluetooth import async_connect_scanner
from .const import (
@@ -425,14 +422,7 @@ class ESPHomeManager:
unique_id_is_mac_address = unique_id and ":" in unique_id
if entry.options.get(CONF_SUBSCRIBE_LOGS):
self._async_subscribe_logs(self._async_get_equivalent_log_level())
results = await asyncio.gather(
create_eager_task(cli.device_info()),
create_eager_task(cli.list_entities_services()),
)
device_info: EsphomeDeviceInfo = results[0]
entity_infos_services: tuple[list[EntityInfo], list[UserService]] = results[1]
entity_infos, services = entity_infos_services
device_info, entity_infos, services = await cli.device_info_and_list_entities()
device_mac = format_mac(device_info.mac_address)
mac_address_matches = unique_id == device_mac
@@ -564,11 +554,11 @@ class ESPHomeManager:
)
entry_data.loaded_platforms.add(Platform.ASSIST_SATELLITE)
cli.subscribe_states(entry_data.async_update_state)
cli.subscribe_service_calls(self.async_on_service_call)
cli.subscribe_home_assistant_states(
self.async_on_state_subscription,
self.async_on_state_request,
cli.subscribe_home_assistant_states_and_services(
on_state=entry_data.async_update_state,
on_service_call=self.async_on_service_call,
on_state_sub=self.async_on_state_subscription,
on_state_request=self.async_on_state_request,
)
entry_data.async_save_to_store()

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==37.2.2",
"aioesphomeapi==39.0.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.1.0"
],

View File

@@ -10,6 +10,7 @@ from urllib.parse import urlparse
from aioesphomeapi import (
EntityInfo,
MediaPlayerCommand,
MediaPlayerEntityFeature as EspMediaPlayerEntityFeature,
MediaPlayerEntityState,
MediaPlayerFormatPurpose,
MediaPlayerInfo,
@@ -50,9 +51,36 @@ _STATES: EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumM
EspMediaPlayerState.IDLE: MediaPlayerState.IDLE,
EspMediaPlayerState.PLAYING: MediaPlayerState.PLAYING,
EspMediaPlayerState.PAUSED: MediaPlayerState.PAUSED,
EspMediaPlayerState.OFF: MediaPlayerState.OFF,
EspMediaPlayerState.ON: MediaPlayerState.ON,
}
)
_FEATURES = {
EspMediaPlayerEntityFeature.PAUSE: MediaPlayerEntityFeature.PAUSE,
EspMediaPlayerEntityFeature.SEEK: MediaPlayerEntityFeature.SEEK,
EspMediaPlayerEntityFeature.VOLUME_SET: MediaPlayerEntityFeature.VOLUME_SET,
EspMediaPlayerEntityFeature.VOLUME_MUTE: MediaPlayerEntityFeature.VOLUME_MUTE,
EspMediaPlayerEntityFeature.PREVIOUS_TRACK: MediaPlayerEntityFeature.PREVIOUS_TRACK,
EspMediaPlayerEntityFeature.NEXT_TRACK: MediaPlayerEntityFeature.NEXT_TRACK,
EspMediaPlayerEntityFeature.TURN_ON: MediaPlayerEntityFeature.TURN_ON,
EspMediaPlayerEntityFeature.TURN_OFF: MediaPlayerEntityFeature.TURN_OFF,
EspMediaPlayerEntityFeature.PLAY_MEDIA: MediaPlayerEntityFeature.PLAY_MEDIA,
EspMediaPlayerEntityFeature.VOLUME_STEP: MediaPlayerEntityFeature.VOLUME_STEP,
EspMediaPlayerEntityFeature.SELECT_SOURCE: MediaPlayerEntityFeature.SELECT_SOURCE,
EspMediaPlayerEntityFeature.STOP: MediaPlayerEntityFeature.STOP,
EspMediaPlayerEntityFeature.CLEAR_PLAYLIST: MediaPlayerEntityFeature.CLEAR_PLAYLIST,
EspMediaPlayerEntityFeature.PLAY: MediaPlayerEntityFeature.PLAY,
EspMediaPlayerEntityFeature.SHUFFLE_SET: MediaPlayerEntityFeature.SHUFFLE_SET,
EspMediaPlayerEntityFeature.SELECT_SOUND_MODE: MediaPlayerEntityFeature.SELECT_SOUND_MODE,
EspMediaPlayerEntityFeature.BROWSE_MEDIA: MediaPlayerEntityFeature.BROWSE_MEDIA,
EspMediaPlayerEntityFeature.REPEAT_SET: MediaPlayerEntityFeature.REPEAT_SET,
EspMediaPlayerEntityFeature.GROUPING: MediaPlayerEntityFeature.GROUPING,
EspMediaPlayerEntityFeature.MEDIA_ANNOUNCE: MediaPlayerEntityFeature.MEDIA_ANNOUNCE,
EspMediaPlayerEntityFeature.MEDIA_ENQUEUE: MediaPlayerEntityFeature.MEDIA_ENQUEUE,
EspMediaPlayerEntityFeature.SEARCH_MEDIA: MediaPlayerEntityFeature.SEARCH_MEDIA,
}
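A short sketch of how the _FEATURES table above is meant to be consumed: the device-reported bitmask is wrapped in the ESPHome flag enum and every set member is translated into the matching Home Assistant flag, as the loop added further down in this file does (the literal flags here are illustrative):

    # Illustrative translation of a device-reported feature bitmask.
    esp_flags = EspMediaPlayerEntityFeature(
        EspMediaPlayerEntityFeature.PLAY_MEDIA | EspMediaPlayerEntityFeature.VOLUME_SET
    )
    ha_flags = MediaPlayerEntityFeature(0)
    for esp_flag in esp_flags:          # iterating a Flag yields each set member
        ha_flags |= _FEATURES[esp_flag]
    # ha_flags now carries PLAY_MEDIA | VOLUME_SET on the Home Assistant side.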
ATTR_BYPASS_PROXY = "bypass_proxy"
@@ -67,16 +95,12 @@ class EsphomeMediaPlayer(
def _on_static_info_update(self, static_info: EntityInfo) -> None:
"""Set attrs from static info."""
super()._on_static_info_update(static_info)
flags = (
MediaPlayerEntityFeature.PLAY_MEDIA
| MediaPlayerEntityFeature.BROWSE_MEDIA
| MediaPlayerEntityFeature.STOP
| MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.MEDIA_ANNOUNCE
esp_flags = EspMediaPlayerEntityFeature(
self._static_info.feature_flags_compat(self._api_version)
)
if self._static_info.supports_pause:
flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY
flags = MediaPlayerEntityFeature(0)
for espflag in esp_flags:
flags |= _FEATURES[espflag]
self._attr_supported_features = flags
self._entry_data.media_player_formats[self.unique_id] = cast(
MediaPlayerInfo, static_info
@@ -257,6 +281,24 @@ class EsphomeMediaPlayer(
device_id=self._static_info.device_id,
)
@convert_api_error_ha_error
async def async_turn_on(self) -> None:
"""Send turn on command."""
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.TURN_ON,
device_id=self._static_info.device_id,
)
@convert_api_error_ha_error
async def async_turn_off(self) -> None:
"""Send turn off command."""
self._client.media_player_command(
self._key,
command=MediaPlayerCommand.TURN_OFF,
device_id=self._static_info.device_id,
)
def _is_url(url: str) -> bool:
"""Validate the URL can be parsed and at least has scheme + netloc."""

View File

@@ -0,0 +1,31 @@
"""Intents for the fan integration."""
import voluptuous as vol
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent
from . import ATTR_PERCENTAGE, DOMAIN, SERVICE_TURN_ON
INTENT_FAN_SET_SPEED = "HassFanSetSpeed"
async def async_setup_intents(hass: HomeAssistant) -> None:
"""Set up the fan intents."""
intent.async_register(
hass,
intent.ServiceIntentHandler(
INTENT_FAN_SET_SPEED,
DOMAIN,
SERVICE_TURN_ON,
description="Sets a fan's speed by percentage",
required_domains={DOMAIN},
platforms={DOMAIN},
required_slots={
ATTR_PERCENTAGE: intent.IntentSlotInfo(
description="The speed percentage of the fan",
value_schema=vol.All(vol.Coerce(int), vol.Range(min=0, max=100)),
)
},
),
)
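A hedged example of exercising the new intent; intent.async_handle is the standard helper entry point, while the platform label and the fan name below are made up for illustration:

    from homeassistant.helpers import intent

    # Illustrative only (run from async code): resolves to fan.turn_on with
    # percentage=50 for the matched fan, assuming a fan named "Ceiling fan" exists.
    response = await intent.async_handle(
        hass,
        "example",                      # calling platform, arbitrary label
        INTENT_FAN_SET_SPEED,
        {"name": {"value": "Ceiling fan"}, "percentage": {"value": 50}},
    )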

View File

@@ -30,7 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: FoscamConfigEntry) -> bo
verbose=False,
)
coordinator = FoscamCoordinator(hass, entry, session)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
@@ -89,7 +88,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: FoscamConfigEntry) ->
async def async_migrate_entities(hass: HomeAssistant, entry: FoscamConfigEntry) -> None:
"""Migrate old entry."""
"""Migrate old entries to support config_entry_id-based unique IDs."""
@callback
def _update_unique_id(

View File

@@ -26,7 +26,7 @@ from .const import CONF_RTSP_PORT, CONF_STREAM, DOMAIN, LOGGER
STREAMS = ["Main", "Sub"]
DEFAULT_PORT = 88
DEFAULT_RTSP_PORT = 554
DEFAULT_RTSP_PORT = 88
DATA_SCHEMA = vol.Schema(

View File

@@ -11,3 +11,16 @@ CONF_STREAM = "stream"
SERVICE_PTZ = "ptz"
SERVICE_PTZ_PRESET = "ptz_preset"
SUPPORTED_SWITCHES = [
"flip_switch",
"mirror_switch",
"ir_switch",
"sleep_switch",
"white_light_switch",
"siren_alarm_switch",
"turn_off_volume_switch",
"light_status_switch",
"hdr_switch",
"wdr_switch",
]

View File

@@ -1,8 +1,8 @@
"""The foscam coordinator object."""
import asyncio
from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from libpyfoscamcgi import FoscamCamera
@@ -15,9 +15,35 @@ from .const import DOMAIN, LOGGER
type FoscamConfigEntry = ConfigEntry[FoscamCoordinator]
class FoscamCoordinator(DataUpdateCoordinator[dict[str, Any]]):
@dataclass
class FoscamDeviceInfo:
"""A data class representing the current state and configuration of a Foscam camera device."""
dev_info: dict
product_info: dict
is_open_ir: bool
is_flip: bool
is_mirror: bool
is_asleep: dict
is_open_white_light: bool
is_siren_alarm: bool
volume: int
speak_volume: int
is_turn_off_volume: bool
is_turn_off_light: bool
is_open_wdr: bool | None = None
is_open_hdr: bool | None = None
class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
"""Foscam coordinator."""
config_entry: FoscamConfigEntry
def __init__(
self,
hass: HomeAssistant,
@@ -34,24 +60,82 @@ class FoscamCoordinator(DataUpdateCoordinator[dict[str, Any]]):
)
self.session = session
async def _async_update_data(self) -> dict[str, Any]:
def gather_all_configs(self) -> FoscamDeviceInfo:
"""Get all Foscam configurations."""
ret_dev_info, dev_info = self.session.get_dev_info()
dev_info = dev_info if ret_dev_info == 0 else {}
ret_product_info, product_info = self.session.get_product_all_info()
product_info = product_info if ret_product_info == 0 else {}
ret_ir, infra_led_config = self.session.get_infra_led_config()
is_open_ir = infra_led_config["mode"] == "1" if ret_ir == 0 else False
ret_mf, mirror_flip_setting = self.session.get_mirror_and_flip_setting()
is_flip = mirror_flip_setting["isFlip"] == "1" if ret_mf == 0 else False
is_mirror = mirror_flip_setting["isMirror"] == "1" if ret_mf == 0 else False
ret_sleep, sleep_setting = self.session.is_asleep()
is_asleep = {"supported": ret_sleep == 0, "status": bool(int(sleep_setting))}
ret_wl, is_open_white_light = self.session.getWhiteLightBrightness()
is_open_white_light_val = (
is_open_white_light["enable"] == "1" if ret_wl == 0 else False
)
ret_sc, is_siren_alarm = self.session.getSirenConfig()
is_siren_alarm_val = (
is_siren_alarm["sirenEnable"] == "1" if ret_sc == 0 else False
)
ret_vol, volume = self.session.getAudioVolume()
volume_val = int(volume["volume"]) if ret_vol == 0 else 0
ret_sv, speak_volume = self.session.getSpeakVolume()
speak_volume_val = int(speak_volume["SpeakVolume"]) if ret_sv == 0 else 0
ret_ves, is_turn_off_volume = self.session.getVoiceEnableState()
is_turn_off_volume_val = not (
ret_ves == 0 and is_turn_off_volume["isEnable"] == "1"
)
ret_les, is_turn_off_light = self.session.getLedEnableState()
is_turn_off_light_val = not (
ret_les == 0 and is_turn_off_light["isEnable"] == "0"
)
is_open_wdr = None
is_open_hdr = None
reserve3 = product_info.get("reserve3")
reserve3_int = int(reserve3) if reserve3 is not None else 0
if (reserve3_int & (1 << 8)) != 0:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
is_open_wdr = bool(int(mode))
else:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
is_open_hdr = bool(int(mode))
return FoscamDeviceInfo(
dev_info=dev_info,
product_info=product_info,
is_open_ir=is_open_ir,
is_flip=is_flip,
is_mirror=is_mirror,
is_asleep=is_asleep,
is_open_white_light=is_open_white_light_val,
is_siren_alarm=is_siren_alarm_val,
volume=volume_val,
speak_volume=speak_volume_val,
is_turn_off_volume=is_turn_off_volume_val,
is_turn_off_light=is_turn_off_light_val,
is_open_wdr=is_open_wdr,
is_open_hdr=is_open_hdr,
)
async def _async_update_data(self) -> FoscamDeviceInfo:
"""Fetch data from API endpoint."""
async with asyncio.timeout(30):
data = {}
ret, dev_info = await self.hass.async_add_executor_job(
self.session.get_dev_info
)
if ret == 0:
data["dev_info"] = dev_info
all_info = await self.hass.async_add_executor_job(
self.session.get_product_all_info
)
data["product_info"] = all_info[1]
ret, is_asleep = await self.hass.async_add_executor_job(
self.session.is_asleep
)
data["is_asleep"] = {"supported": ret == 0, "status": is_asleep}
return data
async with asyncio.timeout(10):
return await self.hass.async_add_executor_job(self.gather_all_configs)
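The WDR/HDR branch above keys off bit flags packed into product_info["reserve3"]; a short sketch of that capability check, under the assumption that bit 8 marks WDR-capable models (bit 7 is used for HDR in the switch setup later in this diff):

    # Illustrative decode of the reserve3 capability bits (assumed semantics).
    reserve3 = int(product_info.get("reserve3", "0") or 0)
    supports_wdr = bool(reserve3 & (1 << 8))   # bit 8 set  -> query WDR mode
    supports_hdr = not supports_wdr            # otherwise  -> query HDR mode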

View File

@@ -13,19 +13,15 @@ from .coordinator import FoscamCoordinator
class FoscamEntity(CoordinatorEntity[FoscamCoordinator]):
"""Base entity for Foscam camera."""
def __init__(
self,
coordinator: FoscamCoordinator,
entry_id: str,
) -> None:
def __init__(self, coordinator: FoscamCoordinator, config_entry_id: str) -> None:
"""Initialize the base Foscam entity."""
super().__init__(coordinator)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, entry_id)},
identifiers={(DOMAIN, config_entry_id)},
manufacturer="Foscam",
)
if dev_info := coordinator.data.get("dev_info"):
if dev_info := coordinator.data.dev_info:
self._attr_device_info[ATTR_MODEL] = dev_info["productName"]
self._attr_device_info[ATTR_SW_VERSION] = dev_info["firmwareVer"]
self._attr_device_info[ATTR_HW_VERSION] = dev_info["hardwareVer"]

View File

@@ -6,5 +6,39 @@
"ptz_preset": {
"service": "mdi:target-variant"
}
},
"entity": {
"switch": {
"flip_switch": {
"default": "mdi:flip-vertical"
},
"mirror_switch": {
"default": "mdi:mirror"
},
"ir_switch": {
"default": "mdi:theme-light-dark"
},
"sleep_switch": {
"default": "mdi:sleep"
},
"white_light_switch": {
"default": "mdi:light-flood-down"
},
"siren_alarm_switch": {
"default": "mdi:alarm-note"
},
"turn_off_volume_switch": {
"default": "mdi:volume-off"
},
"turn_off_light_switch": {
"default": "mdi:lightbulb-fluorescent-tube"
},
"hdr_switch": {
"default": "mdi:hdr"
},
"wdr_switch": {
"default": "mdi:alpha-w-box"
}
}
}
}

View File

@@ -11,7 +11,12 @@
"stream": "Stream"
},
"data_description": {
"host": "The hostname or IP address of your Foscam camera."
"host": "The hostname or IP address of your Foscam camera.",
"port": "The port of your Foscam camera, default is 88.",
"username": "The username to log in to your Foscam camera.",
"password": "The password to log in to your Foscam camera.",
"rtsp_port": "The RTSP protocol port of the camera, used to pull the camera's real-time video stream. New model cameras only support RTSP ports 88 and 554, while old model cameras only support ports 88 and 65534.",
"stream": "Select the video stream type to pull. The main stream offers higher clarity but requires a better network environment."
}
}
},
@@ -27,8 +32,35 @@
},
"entity": {
"switch": {
"flip_switch": {
"name": "Flip"
},
"mirror_switch": {
"name": "Mirror"
},
"ir_switch": {
"name": "Infrared mode"
},
"sleep_switch": {
"name": "Sleep"
"name": "Sleep mode"
},
"white_light_switch": {
"name": "White light"
},
"siren_alarm_switch": {
"name": "Siren alarm"
},
"turn_off_volume_switch": {
"name": "Volume muted"
},
"turn_off_light_switch": {
"name": "Light"
},
"hdr_switch": {
"name": "HDR"
},
"wdr_switch": {
"name": "WDR"
}
}
},

View File

@@ -2,18 +2,117 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from libpyfoscamcgi import FoscamCamera
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import LOGGER
from .coordinator import FoscamConfigEntry, FoscamCoordinator
from .entity import FoscamEntity
def handle_ir_turn_on(session: FoscamCamera) -> None:
"""Turn on IR LED: sets IR mode to auto (if supported), then turns off the IR LED."""
session.set_infra_led_config(1)
session.open_infra_led()
def handle_ir_turn_off(session: FoscamCamera) -> None:
"""Turn off IR LED: sets IR mode to manual (if supported), then turns open the IR LED."""
session.set_infra_led_config(0)
session.close_infra_led()
@dataclass(frozen=True, kw_only=True)
class FoscamSwitchEntityDescription(SwitchEntityDescription):
"""A custom entity description that supports a turn_off function."""
native_value_fn: Callable[..., bool]
turn_off_fn: Callable[[FoscamCamera], None]
turn_on_fn: Callable[[FoscamCamera], None]
SWITCH_DESCRIPTIONS: list[FoscamSwitchEntityDescription] = [
FoscamSwitchEntityDescription(
key="is_flip",
translation_key="flip_switch",
native_value_fn=lambda data: data.is_flip,
turn_off_fn=lambda session: session.flip_video(0),
turn_on_fn=lambda session: session.flip_video(1),
),
FoscamSwitchEntityDescription(
key="is_mirror",
translation_key="mirror_switch",
native_value_fn=lambda data: data.is_mirror,
turn_off_fn=lambda session: session.mirror_video(0),
turn_on_fn=lambda session: session.mirror_video(1),
),
FoscamSwitchEntityDescription(
key="is_open_ir",
translation_key="ir_switch",
native_value_fn=lambda data: data.is_open_ir,
turn_off_fn=handle_ir_turn_off,
turn_on_fn=handle_ir_turn_on,
),
FoscamSwitchEntityDescription(
key="sleep_switch",
translation_key="sleep_switch",
native_value_fn=lambda data: data.is_asleep["status"],
turn_off_fn=lambda session: session.wake_up(),
turn_on_fn=lambda session: session.sleep(),
),
FoscamSwitchEntityDescription(
key="is_open_white_light",
translation_key="white_light_switch",
native_value_fn=lambda data: data.is_open_white_light,
turn_off_fn=lambda session: session.closeWhiteLight(),
turn_on_fn=lambda session: session.openWhiteLight(),
),
FoscamSwitchEntityDescription(
key="is_siren_alarm",
translation_key="siren_alarm_switch",
native_value_fn=lambda data: data.is_siren_alarm,
turn_off_fn=lambda session: session.setSirenConfig(0, 100, 0),
turn_on_fn=lambda session: session.setSirenConfig(1, 100, 0),
),
FoscamSwitchEntityDescription(
key="is_turn_off_volume",
translation_key="turn_off_volume_switch",
native_value_fn=lambda data: data.is_turn_off_volume,
turn_off_fn=lambda session: session.setVoiceEnableState(1),
turn_on_fn=lambda session: session.setVoiceEnableState(0),
),
FoscamSwitchEntityDescription(
key="is_turn_off_light",
translation_key="turn_off_light_switch",
native_value_fn=lambda data: data.is_turn_off_light,
turn_off_fn=lambda session: session.setLedEnableState(0),
turn_on_fn=lambda session: session.setLedEnableState(1),
),
FoscamSwitchEntityDescription(
key="is_open_hdr",
translation_key="hdr_switch",
native_value_fn=lambda data: data.is_open_hdr,
turn_off_fn=lambda session: session.setHdrMode(0),
turn_on_fn=lambda session: session.setHdrMode(1),
),
FoscamSwitchEntityDescription(
key="is_open_wdr",
translation_key="wdr_switch",
native_value_fn=lambda data: data.is_open_wdr,
turn_off_fn=lambda session: session.setWdrMode(0),
turn_on_fn=lambda session: session.setWdrMode(1),
),
]
async def async_setup_entry(
hass: HomeAssistant,
config_entry: FoscamConfigEntry,
@@ -22,63 +121,61 @@ async def async_setup_entry(
"""Set up foscam switch from a config entry."""
coordinator = config_entry.runtime_data
await coordinator.async_config_entry_first_refresh()
if coordinator.data["is_asleep"]["supported"]:
async_add_entities([FoscamSleepSwitch(coordinator, config_entry)])
entities = []
product_info = coordinator.data.product_info
reserve3 = product_info.get("reserve3", "0")
for description in SWITCH_DESCRIPTIONS:
if description.key == "is_asleep":
if not coordinator.data.is_asleep["supported"]:
continue
elif description.key == "is_open_hdr":
if ((1 << 8) & int(reserve3)) != 0 or ((1 << 7) & int(reserve3)) == 0:
continue
elif description.key == "is_open_wdr":
if ((1 << 8) & int(reserve3)) == 0:
continue
entities.append(FoscamGenericSwitch(coordinator, description))
async_add_entities(entities)
class FoscamSleepSwitch(FoscamEntity, SwitchEntity):
"""An implementation for Sleep Switch."""
class FoscamGenericSwitch(FoscamEntity, SwitchEntity):
"""A generic switch class for Foscam entities."""
_attr_has_entity_name = True
entity_description: FoscamSwitchEntityDescription
def __init__(
self,
coordinator: FoscamCoordinator,
config_entry: FoscamConfigEntry,
description: FoscamSwitchEntityDescription,
) -> None:
"""Initialize a Foscam Sleep Switch."""
super().__init__(coordinator, config_entry.entry_id)
"""Initialize the generic switch."""
entry_id = coordinator.config_entry.entry_id
super().__init__(coordinator, entry_id)
self._attr_unique_id = f"{config_entry.entry_id}_sleep_switch"
self._attr_translation_key = "sleep_switch"
self._attr_has_entity_name = True
self.is_asleep = self.coordinator.data["is_asleep"]["status"]
self.entity_description = description
self._attr_unique_id = f"{entry_id}_{description.key}"
@property
def is_on(self):
"""Return true if camera is asleep."""
return self.is_asleep
def is_on(self) -> bool:
"""Return the state of the switch."""
return self.entity_description.native_value_fn(self.coordinator.data)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Wake camera."""
LOGGER.debug("Wake camera")
ret, _ = await self.hass.async_add_executor_job(
self.coordinator.session.wake_up
"""Turn off the entity."""
await self.hass.async_add_executor_job(
self.entity_description.turn_off_fn, self.coordinator.session
)
if ret != 0:
raise HomeAssistantError(f"Error waking up: {ret}")
await self.coordinator.async_request_refresh()
async def async_turn_on(self, **kwargs: Any) -> None:
"""But camera is sleep."""
LOGGER.debug("Sleep camera")
ret, _ = await self.hass.async_add_executor_job(self.coordinator.session.sleep)
if ret != 0:
raise HomeAssistantError(f"Error sleeping: {ret}")
"""Turn on the entity."""
await self.hass.async_add_executor_job(
self.entity_description.turn_on_fn, self.coordinator.session
)
await self.coordinator.async_request_refresh()
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self.is_asleep = self.coordinator.data["is_asleep"]["status"]
self.async_write_ha_state()
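For reference, a minimal sketch of how FoscamGenericSwitch consumes one of the descriptions above; it reuses the flip description and is purely illustrative, not part of the change:

    # Illustrative: state and commands both flow through the entity description.
    description = SWITCH_DESCRIPTIONS[0]                    # "is_flip"
    is_on = description.native_value_fn(coordinator.data)   # reads FoscamDeviceInfo.is_flip
    if not is_on:
        # blocking CGI call, run in the executor as async_turn_on() does
        await hass.async_add_executor_job(description.turn_on_fn, coordinator.session)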

View File

@@ -115,6 +115,7 @@ class FreeboxRouter:
self._api: Freepybox = api
self.name: str = freebox_config["model_info"]["pretty_name"]
self.model_id: str = freebox_config["model_info"]["name"]
self.mac: str = freebox_config["mac"]
self._sw_v: str = freebox_config["firmware_version"]
self._hw_v: str | None = freebox_config.get("board_name")
@@ -284,6 +285,7 @@ class FreeboxRouter:
manufacturer="Freebox SAS",
name=self.name,
model=self.name,
model_id=self.model_id,
sw_version=self._sw_v,
hw_version=self._hw_v,
)

View File

@@ -120,7 +120,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
self.fritz_guest_wifi: FritzGuestWLAN = None
self.fritz_hosts: FritzHosts = None
self.fritz_status: FritzStatus = None
self.hass = hass
self.host = host
self.mesh_role = MeshRoles.NONE
self.mesh_wifi_uplink = False

View File

@@ -2,3 +2,8 @@
DOMAIN = "fyta"
CONF_EXPIRATION = "expiration"
CONF_MAX_ACCEPTABLE = "max_acceptable"
CONF_MAX_GOOD = "max_good"
CONF_MIN_ACCEPTABLE = "min_acceptable"
CONF_MIN_GOOD = "min_good"

View File

@@ -25,6 +25,12 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import (
CONF_MAX_ACCEPTABLE,
CONF_MAX_GOOD,
CONF_MIN_ACCEPTABLE,
CONF_MIN_GOOD,
)
from .coordinator import FytaConfigEntry, FytaCoordinator
from .entity import FytaPlantEntity
@@ -36,6 +42,13 @@ class FytaSensorEntityDescription(SensorEntityDescription):
value_fn: Callable[[Plant], StateType | datetime]
@dataclass(frozen=True, kw_only=True)
class FytaMeasurementSensorEntityDescription(FytaSensorEntityDescription):
"""Describes Fyta sensor entity."""
attribute_fn: Callable[[Plant], dict[str, float | None]]
PLANT_STATUS_LIST: list[str] = ["deleted", "doing_great", "need_attention", "no_sensor"]
PLANT_MEASUREMENT_STATUS_LIST: list[str] = [
"no_data",
@@ -95,35 +108,6 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
options=PLANT_MEASUREMENT_STATUS_LIST,
value_fn=lambda plant: plant.salinity_status.name.lower(),
),
FytaSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.temperature,
),
FytaSensorEntityDescription(
key="light",
translation_key="light",
native_unit_of_measurement="μmol/s⋅m²",
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.light,
),
FytaSensorEntityDescription(
key="moisture",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.MOISTURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.moisture,
),
FytaSensorEntityDescription(
key="salinity",
translation_key="salinity",
native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM,
device_class=SensorDeviceClass.CONDUCTIVITY,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda plant: plant.salinity,
),
FytaSensorEntityDescription(
key="ph",
device_class=SensorDeviceClass.PH,
@@ -152,6 +136,62 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [
),
]
MEASUREMENT_SENSORS: Final[list[FytaMeasurementSensorEntityDescription]] = [
FytaMeasurementSensorEntityDescription(
key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.temperature_max_acceptable,
CONF_MAX_GOOD: plant.temperature_max_good,
CONF_MIN_ACCEPTABLE: plant.temperature_min_acceptable,
CONF_MIN_GOOD: plant.temperature_min_good,
},
value_fn=lambda plant: plant.temperature,
),
FytaMeasurementSensorEntityDescription(
key="light",
translation_key="light",
native_unit_of_measurement="μmol/s⋅m²",
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.light_max_acceptable,
CONF_MAX_GOOD: plant.light_max_good,
CONF_MIN_ACCEPTABLE: plant.light_min_acceptable,
CONF_MIN_GOOD: plant.light_min_good,
},
value_fn=lambda plant: plant.light,
),
FytaMeasurementSensorEntityDescription(
key="moisture",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.MOISTURE,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.moisture_max_acceptable,
CONF_MAX_GOOD: plant.moisture_max_good,
CONF_MIN_ACCEPTABLE: plant.moisture_min_acceptable,
CONF_MIN_GOOD: plant.moisture_min_good,
},
value_fn=lambda plant: plant.moisture,
),
FytaMeasurementSensorEntityDescription(
key="salinity",
translation_key="salinity",
native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS_PER_CM,
device_class=SensorDeviceClass.CONDUCTIVITY,
state_class=SensorStateClass.MEASUREMENT,
attribute_fn=lambda plant: {
CONF_MAX_ACCEPTABLE: plant.salinity_max_acceptable,
CONF_MAX_GOOD: plant.salinity_max_good,
CONF_MIN_ACCEPTABLE: plant.salinity_min_acceptable,
CONF_MIN_GOOD: plant.salinity_min_good,
},
value_fn=lambda plant: plant.salinity,
),
]
async def async_setup_entry(
hass: HomeAssistant,
@@ -168,14 +208,28 @@ async def async_setup_entry(
if sensor.key in dir(coordinator.data.get(plant_id))
]
plant_entities.extend(
FytaPlantMeasurementSensor(coordinator, entry, sensor, plant_id)
for plant_id in coordinator.fyta.plant_list
for sensor in MEASUREMENT_SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
)
async_add_entities(plant_entities)
def _async_add_new_device(plant_id: int) -> None:
async_add_entities(
plant_entities = [
FytaPlantSensor(coordinator, entry, sensor, plant_id)
for sensor in SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
]
plant_entities.extend(
FytaPlantMeasurementSensor(coordinator, entry, sensor, plant_id)
for sensor in MEASUREMENT_SENSORS
if sensor.key in dir(coordinator.data.get(plant_id))
)
async_add_entities(plant_entities)
coordinator.new_device_callbacks.append(_async_add_new_device)
@@ -190,3 +244,15 @@ class FytaPlantSensor(FytaPlantEntity, SensorEntity):
"""Return the state for this sensor."""
return self.entity_description.value_fn(self.plant)
class FytaPlantMeasurementSensor(FytaPlantSensor):
"""Represents a Fyta measurement sensor."""
entity_description: FytaMeasurementSensorEntityDescription
@property
def extra_state_attributes(self) -> dict[str, float | None]:
"""Return the device state attributes."""
return self.entity_description.attribute_fn(self.plant)
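With attribute_fn in place, each measurement sensor exposes its plant-specific thresholds as state attributes; a hedged sketch of what the temperature sensor would report (values are illustrative):

    # Illustrative extra_state_attributes for the temperature measurement sensor.
    {
        "max_acceptable": 35.0,   # CONF_MAX_ACCEPTABLE
        "max_good": 28.0,         # CONF_MAX_GOOD
        "min_acceptable": 8.0,    # CONF_MIN_ACCEPTABLE
        "min_good": 15.0,         # CONF_MIN_GOOD
    }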

View File

@@ -138,10 +138,64 @@
}
},
"light": {
"name": "Light"
"name": "Light",
"state_attributes": {
"max_acceptable": { "name": "Maximum acceptable" },
"max_good": { "name": "Maximum good" },
"min_acceptable": { "name": "Minimum acceptable" },
"min_good": { "name": "Minimum good" }
}
},
"moisture": {
"name": "[%key:component::sensor::entity_component::moisture::name%]",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
},
"salinity": {
"name": "Salinity"
"name": "Salinity",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
},
"temperature": {
"name": "[%key:component::sensor::entity_component::temperature::name%]",
"state_attributes": {
"max_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_acceptable::name%]"
},
"max_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::max_good::name%]"
},
"min_acceptable": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_acceptable::name%]"
},
"min_good": {
"name": "[%key:component::fyta::entity::sensor::light::state_attributes::min_good::name%]"
}
}
},
"last_fertilised": {
"name": "Last fertilized"

View File

@@ -29,7 +29,6 @@ class GlancesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self, hass: HomeAssistant, entry: GlancesConfigEntry, api: Glances
) -> None:
"""Initialize the Glances data."""
self.hass = hass
self.host: str = entry.data[CONF_HOST]
self.api = api
super().__init__(

View File

@@ -146,6 +146,20 @@ async def light_switch_options_schema(
)
LIGHT_CONFIG_SCHEMA = basic_group_config_schema("light").extend(
{
vol.Required(CONF_ALL, default=False): selector.BooleanSelector(),
}
)
SWITCH_CONFIG_SCHEMA = basic_group_config_schema("switch").extend(
{
vol.Required(CONF_ALL, default=False): selector.BooleanSelector(),
}
)
GROUP_TYPES = [
"binary_sensor",
"button",
@@ -210,7 +224,7 @@ CONFIG_FLOW = {
validate_user_input=set_group_type("fan"),
),
"light": SchemaFlowFormStep(
basic_group_config_schema("light"),
LIGHT_CONFIG_SCHEMA,
preview="group",
validate_user_input=set_group_type("light"),
),
@@ -235,7 +249,7 @@ CONFIG_FLOW = {
validate_user_input=set_group_type("sensor"),
),
"switch": SchemaFlowFormStep(
basic_group_config_schema("switch"),
SWITCH_CONFIG_SCHEMA,
preview="group",
validate_user_input=set_group_type("switch"),
),
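The new CONF_ALL selector ends up as a plain boolean in the created config entry's options; a hedged sketch of the resulting options for a light group (key names beyond "all" and the entity IDs are illustrative):

    # Illustrative options stored by the config flow for a light group.
    options = {
        "group_type": "light",
        "name": "Living room lights",
        "entities": ["light.lamp_left", "light.lamp_right"],
        "hide_members": False,
        "all": True,   # group is on only when every member is on
    }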

View File

@@ -66,9 +66,13 @@
"light": {
"title": "[%key:component::group::config::step::user::title%]",
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
},
"lock": {
@@ -115,9 +119,13 @@
"switch": {
"title": "[%key:component::group::config::step::user::title%]",
"data": {
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
"name": "[%key:common::config_flow::data::name%]"
},
"data_description": {
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
}
}
}

View File

@@ -74,7 +74,7 @@ class ValveControllerEntity(GuardianEntity):
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, entry.data[CONF_UID])},
manufacturer="Elexa",
model=self._diagnostics_coordinator.data["firmware"],
sw_version=self._diagnostics_coordinator.data["firmware"],
name=f"Guardian valve controller {entry.data[CONF_UID]}",
)
self._attr_unique_id = f"{entry.data[CONF_UID]}_{description.key}"

View File

@@ -1,19 +1,26 @@
"""The habitica integration."""
from uuid import UUID
from habiticalib import Habitica
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from .const import CONF_API_USER, DOMAIN, X_CLIENT
from .coordinator import HabiticaConfigEntry, HabiticaDataUpdateCoordinator
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
from .services import async_setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
HABITICA_KEY: HassKey[dict[UUID, HabiticaPartyCoordinator]] = HassKey(DOMAIN)
PLATFORMS = [
Platform.BINARY_SENSOR,
@@ -37,6 +44,8 @@ async def async_setup_entry(
hass: HomeAssistant, config_entry: HabiticaConfigEntry
) -> bool:
"""Set up habitica from a config entry."""
party_added_by_this_entry: UUID | None = None
device_reg = dr.async_get(hass)
session = async_get_clientsession(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
@@ -54,11 +63,53 @@ async def async_setup_entry(
await coordinator.async_config_entry_first_refresh()
config_entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
party = coordinator.data.user.party.id
if HABITICA_KEY not in hass.data:
hass.data[HABITICA_KEY] = {}
if party is not None and party not in hass.data[HABITICA_KEY]:
party_coordinator = HabiticaPartyCoordinator(hass, config_entry, api)
await party_coordinator.async_config_entry_first_refresh()
hass.data[HABITICA_KEY][party] = party_coordinator
party_added_by_this_entry = party
@callback
def _party_update_listener() -> None:
"""On party change, unload coordinator, remove device and reload."""
nonlocal party, party_added_by_this_entry
party_updated = coordinator.data.user.party.id
if (
party is not None and (party not in hass.data[HABITICA_KEY])
) or party != party_updated:
if party_added_by_this_entry:
config_entry.async_create_task(
hass, shutdown_party_coordinator(hass, party_added_by_this_entry)
)
party_added_by_this_entry = None
if party:
identifier = {(DOMAIN, f"{config_entry.unique_id}_{party!s}")}
if device := device_reg.async_get_device(identifiers=identifier):
device_reg.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id
)
hass.config_entries.async_schedule_reload(config_entry.entry_id)
coordinator.async_add_listener(_party_update_listener)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
async def shutdown_party_coordinator(hass: HomeAssistant, party_added: UUID) -> None:
"""Handle party coordinator shutdown."""
await hass.data[HABITICA_KEY][party_added].async_shutdown()
hass.data[HABITICA_KEY].pop(party_added)
async def async_unload_entry(hass: HomeAssistant, entry: HabiticaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
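The HABITICA_KEY HassKey gives every config entry typed access to the same per-party coordinator, so a party shared by several accounts is only polled once. A minimal sketch of a platform looking it up, mirroring the pattern used in binary_sensor and image below:

    # Illustrative lookup from a platform's async_setup_entry.
    if (party := coordinator.data.user.party.id) is not None:
        party_coordinator = hass.data[HABITICA_KEY][party]   # HabiticaPartyCoordinator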

View File

@@ -6,18 +6,20 @@ from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from habiticalib import UserData
from habiticalib import ContentData, UserData
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import HabiticaConfigEntry
from .entity import HabiticaBase
from .coordinator import HabiticaConfigEntry, HabiticaPartyCoordinator
from .entity import HabiticaBase, HabiticaPartyBase
PARALLEL_UPDATES = 1
@@ -34,6 +36,7 @@ class HabiticaBinarySensor(StrEnum):
"""Habitica Entities."""
PENDING_QUEST = "pending_quest"
QUEST_RUNNING = "quest_running"
def get_scroll_image_for_pending_quest_invitation(user: UserData) -> str | None:
@@ -62,10 +65,21 @@ async def async_setup_entry(
coordinator = config_entry.runtime_data
async_add_entities(
entities: list[BinarySensorEntity] = [
HabiticaBinarySensorEntity(coordinator, description)
for description in BINARY_SENSOR_DESCRIPTIONS
)
]
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
entities.append(
HabiticaPartyBinarySensorEntity(
party_coordinator,
config_entry,
coordinator.content,
)
)
async_add_entities(entities)
class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity):
@@ -86,3 +100,27 @@ class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity):
):
return f"{ASSETS_URL}{entity_picture}"
return None
class HabiticaPartyBinarySensorEntity(HabiticaPartyBase, BinarySensorEntity):
"""Representation of a Habitica party binary sensor."""
entity_description = BinarySensorEntityDescription(
key=HabiticaBinarySensor.QUEST_RUNNING,
translation_key=HabiticaBinarySensor.QUEST_RUNNING,
device_class=BinarySensorDeviceClass.RUNNING,
)
def __init__(
self,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
content: ContentData,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator, config_entry, self.entity_description, content)
@property
def is_on(self) -> bool | None:
"""If the binary sensor is on."""
return self.coordinator.data.quest.active

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from abc import abstractmethod
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
@@ -13,6 +14,7 @@ from aiohttp import ClientError
from habiticalib import (
Avatar,
ContentData,
GroupData,
Habitica,
HabiticaException,
NotAuthorizedError,
@@ -49,10 +51,11 @@ class HabiticaData:
type HabiticaConfigEntry = ConfigEntry[HabiticaDataUpdateCoordinator]
class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
"""Habitica Data Update Coordinator."""
class HabiticaBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]):
"""Habitica coordinator base class."""
config_entry: HabiticaConfigEntry
_update_interval: timedelta
def __init__(
self, hass: HomeAssistant, config_entry: HabiticaConfigEntry, habitica: Habitica
@@ -63,7 +66,7 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=timedelta(seconds=60),
update_interval=self._update_interval,
request_refresh_debouncer=Debouncer(
hass,
_LOGGER,
@@ -71,8 +74,40 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
immediate=False,
),
)
self.habitica = habitica
self.content: ContentData
@abstractmethod
async def _update_data(self) -> _DataT:
"""Fetch data."""
async def _async_update_data(self) -> _DataT:
"""Fetch the latest party data."""
try:
return await self._update_data()
except TooManyRequestsError:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
except HabiticaException as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e.error.message)},
) from e
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e)},
) from e
class HabiticaDataUpdateCoordinator(HabiticaBaseCoordinator[HabiticaData]):
"""Habitica Data Update Coordinator."""
_update_interval = timedelta(seconds=30)
content: ContentData
async def _async_setup(self) -> None:
"""Set up Habitica integration."""
@@ -106,30 +141,16 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
translation_placeholders={"reason": str(e)},
) from e
async def _async_update_data(self) -> HabiticaData:
try:
user = (await self.habitica.get_user()).data
tasks = (await self.habitica.get_tasks()).data
completed_todos = (
await self.habitica.get_tasks(TaskFilter.COMPLETED_TODOS)
).data
except TooManyRequestsError:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
except HabiticaException as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e.error.message)},
) from e
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="service_call_exception",
translation_placeholders={"reason": str(e)},
) from e
else:
return HabiticaData(user=user, tasks=tasks + completed_todos)
async def _update_data(self) -> HabiticaData:
"""Fetch the latest data."""
user = (await self.habitica.get_user()).data
tasks = (await self.habitica.get_tasks()).data
completed_todos = (
await self.habitica.get_tasks(TaskFilter.COMPLETED_TODOS)
).data
return HabiticaData(user=user, tasks=tasks + completed_todos)
async def execute(self, func: Callable[[Habitica], Any]) -> None:
"""Execute an API call."""
@@ -169,3 +190,13 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
await self.habitica.generate_avatar(fp=png, avatar=avatar, fmt="PNG")
return png.getvalue()
class HabiticaPartyCoordinator(HabiticaBaseCoordinator[GroupData]):
"""Habitica Party Coordinator."""
_update_interval = timedelta(minutes=15)
async def _update_data(self) -> GroupData:
"""Fetch the latest party data."""
return (await self.habitica.get_group()).data
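New coordinators only implement _update_data and declare an interval; the rate-limit and error handling lives once in the base class. A hedged sketch of a further subclass, reusing only calls already shown above (the class itself is illustrative and not part of the change):

    class HabiticaExampleCoordinator(HabiticaBaseCoordinator[HabiticaData]):
        """Illustrative subclass: only the fetch and the interval are specific."""

        _update_interval = timedelta(minutes=5)

        async def _update_data(self) -> HabiticaData:
            # TooManyRequestsError, HabiticaException and ClientError are all
            # handled by HabiticaBaseCoordinator._async_update_data.
            user = (await self.habitica.get_user()).data
            tasks = (await self.habitica.get_tasks()).data
            return HabiticaData(user=user, tasks=tasks)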

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from habiticalib import ContentData
from yarl import URL
from homeassistant.const import CONF_URL
@@ -12,7 +13,11 @@ from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER, NAME
from .coordinator import HabiticaDataUpdateCoordinator
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
class HabiticaBase(CoordinatorEntity[HabiticaDataUpdateCoordinator]):
@@ -45,3 +50,33 @@ class HabiticaBase(CoordinatorEntity[HabiticaDataUpdateCoordinator]):
),
identifiers={(DOMAIN, coordinator.config_entry.unique_id)},
)
class HabiticaPartyBase(CoordinatorEntity[HabiticaPartyCoordinator]):
"""Base Habitica entity representing a party."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
entity_description: EntityDescription,
content: ContentData,
) -> None:
"""Initialize a Habitica party entity."""
super().__init__(coordinator)
if TYPE_CHECKING:
assert config_entry.unique_id
unique_id = f"{config_entry.unique_id}_{coordinator.data.id!s}"
self.entity_description = entity_description
self._attr_unique_id = f"{unique_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer=MANUFACTURER,
model=NAME,
name=coordinator.data.summary,
identifiers={(DOMAIN, unique_id)},
via_device=(DOMAIN, config_entry.unique_id),
)
self.content = content

View File

@@ -156,6 +156,24 @@
},
"pending_quest_items": {
"default": "mdi:sack"
},
"group_leader": {
"default": "mdi:shield-crown"
},
"quest": {
"default": "mdi:script-text-outline"
},
"boss": {
"default": "mdi:emoticon-devil"
},
"boss_hp": {
"default": "mdi:heart"
},
"boss_hp_remaining": {
"default": "mdi:heart"
},
"collected_items": {
"default": "mdi:sack"
}
},
"switch": {
@@ -172,6 +190,9 @@
"state": {
"on": "mdi:script-text-outline"
}
},
"quest_running": {
"default": "mdi:script-text-play"
}
}
},

View File

@@ -4,15 +4,21 @@ from __future__ import annotations
from enum import StrEnum
from habiticalib import Avatar, extract_avatar
from habiticalib import Avatar, ContentData, extract_avatar
from homeassistant.components.image import ImageEntity, ImageEntityDescription
from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from .coordinator import HabiticaConfigEntry, HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import (
HabiticaConfigEntry,
HabiticaDataUpdateCoordinator,
HabiticaPartyCoordinator,
)
from .entity import HabiticaBase, HabiticaPartyBase
PARALLEL_UPDATES = 1
@@ -21,6 +27,7 @@ class HabiticaImageEntity(StrEnum):
"""Image entities."""
AVATAR = "avatar"
QUEST_IMAGE = "quest_image"
async def async_setup_entry(
@@ -31,8 +38,17 @@ async def async_setup_entry(
"""Set up the habitica image platform."""
coordinator = config_entry.runtime_data
entities: list[ImageEntity] = [HabiticaImage(hass, coordinator)]
async_add_entities([HabiticaImage(hass, coordinator)])
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
entities.append(
HabiticaPartyImage(
hass, party_coordinator, config_entry, coordinator.content
)
)
async_add_entities(entities)
class HabiticaImage(HabiticaBase, ImageEntity):
@@ -72,3 +88,58 @@ class HabiticaImage(HabiticaBase, ImageEntity):
if not self._cache and self._avatar:
self._cache = await self.coordinator.generate_avatar(self._avatar)
return self._cache
class HabiticaPartyImage(HabiticaPartyBase, ImageEntity):
"""A Habitica image entity of a party."""
entity_description = ImageEntityDescription(
key=HabiticaImageEntity.QUEST_IMAGE,
translation_key=HabiticaImageEntity.QUEST_IMAGE,
)
_attr_content_type = "image/png"
def __init__(
self,
hass: HomeAssistant,
coordinator: HabiticaPartyCoordinator,
config_entry: HabiticaConfigEntry,
content: ContentData,
) -> None:
"""Initialize the image entity."""
super().__init__(coordinator, config_entry, self.entity_description, content)
ImageEntity.__init__(self, hass)
self._attr_image_url = self.image_url
self._attr_image_last_updated = dt_util.utcnow()
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self.image_url != self._attr_image_url:
self._attr_image_url = self.image_url
self._cached_image = None
self._attr_image_last_updated = dt_util.utcnow()
super()._handle_coordinator_update()
@property
def image_url(self) -> str | None:
"""Return URL of image."""
return (
f"{ASSETS_URL}quest_{key}.png"
if (key := self.coordinator.data.quest.key)
else None
)
async def _async_load_image_from_url(self, url: str) -> Image | None:
"""Load an image by url.
AWS sometimes returns 'application/octet-stream' as content-type
"""
if response := await self._fetch_url(url):
return Image(
content=response.content,
content_type=self._attr_content_type,
)
return None


@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["habiticalib"],
"quality_scale": "platinum",
"requirements": ["habiticalib==0.4.1"]
"requirements": ["habiticalib==0.4.2"]
}


@@ -72,7 +72,7 @@ rules:
comment: Used to inform of deprecated entities and actions.
stale-devices:
status: done
comment: Not applicable. Only one device per config entry. Removed together with the config entry.
comment: Party device is removed if stale.
# Platinum
async-dependency: done


@@ -8,7 +8,7 @@ from enum import StrEnum
import logging
from typing import Any
from habiticalib import ContentData, HabiticaClass, TaskData, UserData, ha
from habiticalib import ContentData, GroupData, HabiticaClass, TaskData, UserData, ha
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -20,15 +20,19 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util import dt as dt_util
from . import HABITICA_KEY
from .const import ASSETS_URL
from .coordinator import HabiticaConfigEntry
from .entity import HabiticaBase
from .entity import HabiticaBase, HabiticaPartyBase
from .util import (
collected_quest_items,
get_attribute_points,
get_attributes_total,
inventory_list,
pending_damage,
pending_quest_items,
quest_attributes,
quest_boss,
)
_LOGGER = logging.getLogger(__name__)
@@ -55,6 +59,17 @@ class HabiticaSensorEntityDescription(SensorEntityDescription):
entity_picture: str | None = None
@dataclass(kw_only=True, frozen=True)
class HabiticaPartySensorEntityDescription(SensorEntityDescription):
"""Habitica Party Sensor Description."""
value_fn: Callable[[GroupData, ContentData], StateType]
entity_picture: Callable[[GroupData], str | None] | str | None = None
attributes_fn: Callable[[GroupData, ContentData], dict[str, Any] | None] | None = (
None
)
@dataclass(kw_only=True, frozen=True)
class HabiticaTaskSensorEntityDescription(SensorEntityDescription):
"""Habitica Task Sensor Description."""
@@ -89,6 +104,13 @@ class HabiticaSensorEntity(StrEnum):
QUEST_SCROLLS = "quest_scrolls"
PENDING_DAMAGE = "pending_damage"
PENDING_QUEST_ITEMS = "pending_quest_items"
MEMBER_COUNT = "member_count"
GROUP_LEADER = "group_leader"
QUEST = "quest"
BOSS = "boss"
BOSS_HP = "boss_hp"
BOSS_HP_REMAINING = "boss_hp_remaining"
COLLECTED_ITEMS = "collected_items"
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
@@ -262,6 +284,67 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
)
SENSOR_DESCRIPTIONS_PARTY: tuple[HabiticaPartySensorEntityDescription, ...] = (
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.MEMBER_COUNT,
translation_key=HabiticaSensorEntity.MEMBER_COUNT,
value_fn=lambda party, _: party.memberCount,
entity_picture=ha.PARTY,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.GROUP_LEADER,
translation_key=HabiticaSensorEntity.GROUP_LEADER,
value_fn=lambda party, _: party.leader.profile.name,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.QUEST,
translation_key=HabiticaSensorEntity.QUEST,
value_fn=lambda p, c: c.quests[p.quest.key].text if p.quest.key else None,
attributes_fn=quest_attributes,
entity_picture=(
lambda party: f"inventory_quest_scroll_{party.quest.key}.png"
if party.quest.key
else None
),
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS,
translation_key=HabiticaSensorEntity.BOSS,
value_fn=lambda p, c: boss.name if (boss := quest_boss(p, c)) else None,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS_HP,
translation_key=HabiticaSensorEntity.BOSS_HP,
value_fn=lambda p, c: boss.hp if (boss := quest_boss(p, c)) else None,
entity_picture=ha.HP,
suggested_display_precision=0,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.BOSS_HP_REMAINING,
translation_key=HabiticaSensorEntity.BOSS_HP_REMAINING,
value_fn=lambda p, _: p.quest.progress.hp,
entity_picture=ha.HP,
suggested_display_precision=2,
),
HabiticaPartySensorEntityDescription(
key=HabiticaSensorEntity.COLLECTED_ITEMS,
translation_key=HabiticaSensorEntity.COLLECTED_ITEMS,
value_fn=(
lambda p, _: sum(n for n in p.quest.progress.collect.values())
if p.quest.progress.collect
else None
),
attributes_fn=collected_quest_items,
entity_picture=(
lambda p: f"quest_{p.quest.key}_{k}.png"
if p.quest.progress.collect
and (k := next(iter(p.quest.progress.collect), None))
else None
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HabiticaConfigEntry,
@@ -275,6 +358,18 @@ async def async_setup_entry(
HabiticaSensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
)
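# Party sensors reuse the shared party coordinator stored under the party ID.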
if party := coordinator.data.user.party.id:
party_coordinator = hass.data[HABITICA_KEY][party]
async_add_entities(
HabiticaPartySensor(
party_coordinator,
config_entry,
description,
coordinator.content,
)
for description in SENSOR_DESCRIPTIONS_PARTY
)
class HabiticaSensor(HabiticaBase, SensorEntity):
"""A generic Habitica sensor."""
@@ -317,3 +412,39 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
)
return None
class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
"""Habitica party sensor."""
entity_description: HabiticaPartySensorEntityDescription
@property
def native_value(self) -> StateType:
"""Return the state of the device."""
return self.entity_description.value_fn(self.coordinator.data, self.content)
@property
def entity_picture(self) -> str | None:
"""Return the entity picture to use in the frontend, if any."""
pic = self.entity_description.entity_picture
entity_picture = (
pic if isinstance(pic, str) or pic is None else pic(self.coordinator.data)
)
return (
None
if not entity_picture
else entity_picture
if entity_picture.startswith("data:image")
else f"{ASSETS_URL}{entity_picture}"
)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return entity specific state attributes."""
if func := self.entity_description.attributes_fn:
return func(self.coordinator.data, self.content)
return None


@@ -7,6 +7,7 @@
"unit_health_points": "HP",
"unit_mana_points": "MP",
"unit_experience_points": "XP",
"unit_items": "items",
"config_entry_description": "Select the Habitica account to update a task.",
"task_description": "The name (or task ID) of the task you want to update.",
"rename_name": "Rename",
@@ -63,7 +64,8 @@
"repeat_weekly_options_name": "Weekly repeat days",
"repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.",
"repeat_monthly_options_name": "Monthly repeat day",
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly."
"repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly.",
"quest_name": "Quest"
},
"config": {
"abort": {
@@ -173,6 +175,9 @@
"binary_sensor": {
"pending_quest": {
"name": "Pending quest invitation"
},
"quest_running": {
"name": "Quest status"
}
},
"button": {
@@ -251,6 +256,9 @@
"image": {
"avatar": {
"name": "Avatar"
},
"quest_image": {
"name": "[%key:component::habitica::common::quest_name%]"
}
},
"sensor": {
@@ -420,7 +428,37 @@
},
"pending_quest_items": {
"name": "Pending quest items",
"unit_of_measurement": "items"
"unit_of_measurement": "[%key:component::habitica::common::unit_items%]"
},
"member_count": {
"name": "Member count",
"unit_of_measurement": "members"
},
"group_leader": {
"name": "Group leader"
},
"quest": {
"name": "[%key:component::habitica::common::quest_name%]",
"state_attributes": {
"quest_details": {
"name": "Quest details"
}
}
},
"boss": {
"name": "Quest boss"
},
"boss_hp": {
"name": "Boss health",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
},
"boss_hp_remaining": {
"name": "Boss health remaining",
"unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
},
"collected_items": {
"name": "Collected quest items",
"unit_of_measurement": "[%key:component::habitica::common::unit_items%]"
}
},
"switch": {


@@ -5,7 +5,7 @@ from __future__ import annotations
from dataclasses import asdict, fields
import datetime
from math import floor
from typing import TYPE_CHECKING, Literal
from typing import TYPE_CHECKING, Any, Literal
from dateutil.rrule import (
DAILY,
@@ -21,7 +21,7 @@ from dateutil.rrule import (
YEARLY,
rrule,
)
from habiticalib import ContentData, Frequency, TaskData, UserData
from habiticalib import ContentData, Frequency, GroupData, QuestBoss, TaskData, UserData
from homeassistant.util import dt as dt_util
@@ -184,3 +184,32 @@ def pending_damage(user: UserData, content: ContentData) -> float | None:
and content.quests[user.party.quest.key].boss is not None
else None
)
def quest_attributes(party: GroupData, content: ContentData) -> dict[str, Any]:
"""Quest description."""
return {
"quest_details": content.quests[party.quest.key].notes
if party.quest.key
else None,
"quest_participants": f"{sum(x is True for x in party.quest.members.values())} / {party.memberCount}",
}
def quest_boss(party: GroupData, content: ContentData) -> QuestBoss | None:
"""Quest boss."""
return content.quests[party.quest.key].boss if party.quest.key else None
def collected_quest_items(party: GroupData, content: ContentData) -> dict[str, Any]:
"""List collected quest items."""
return (
{
collect[k].text: f"{v} / {collect[k].count}"
for k, v in party.quest.progress.collect.items()
}
if party.quest.key and (collect := content.quests[party.quest.key].collect)
else {}
)


@@ -103,6 +103,7 @@ ISSUE_KEYS_FOR_REPAIRS = {
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
ISSUE_KEY_ADDON_DETACHED_ADDON_MISSING,
ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED,
"issue_system_disk_lifetime",
}
_LOGGER = logging.getLogger(__name__)


@@ -115,6 +115,10 @@
}
}
},
"issue_system_disk_lifetime": {
"title": "Disk lifetime exceeding 90%",
"description": "The data disk has exceeded 90% of its expected lifespan. The disk may soon malfunction which can lead to data loss. You should replace it soon and migrate your data."
},
"unhealthy": {
"title": "Unhealthy system - {reason}",
"description": "System is currently unhealthy due to {reason}. For troubleshooting information, select Learn more."


@@ -2,7 +2,6 @@
from __future__ import annotations
import asyncio
from asyncio import sleep as asyncio_sleep
from collections import defaultdict
from collections.abc import Callable
@@ -54,7 +53,6 @@ _LOGGER = logging.getLogger(__name__)
MAX_EXECUTIONS_TIME_WINDOW = 60 * 60 # 1 hour
MAX_EXECUTIONS = 8
UPDATE_PROGRAMS_INTERVAL_SEC = 1 # 1 second
type HomeConnectConfigEntry = ConfigEntry[HomeConnectCoordinator]
@@ -253,30 +251,11 @@ class HomeConnectCoordinator(
EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
):
program_key = ProgramKey(cast(str, event.value))
# If the active program is unknown, options must be updated using the selected program.
if (
event_key
is EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM
and event.value is ProgramKey.UNKNOWN
):
select_program_event = events.get(
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM
)
if select_program_event:
program_key = cast(
ProgramKey,
select_program_event.value,
)
# Wait for a second before updating options because it may take time for the Home Connect API
# to update the options after the program change.
await asyncio.sleep(
UPDATE_PROGRAMS_INTERVAL_SEC
)
await self.update_options(
event_message_ha_id, event_key, program_key
event_message_ha_id,
event_key,
ProgramKey(cast(str, event.value)),
)
events[event_key] = event
self._call_event_listener(event_message)

Some files were not shown because too many files have changed in this diff.