Compare commits


148 Commits

Author SHA1 Message Date
jbouwh
4ad5ce6d52 Set climate state to off if is_on is False 2025-01-01 13:17:57 +00:00
Jan Bouwhuis
a117a3cba9 Update homeassistant/components/climate/__init__.py
Co-authored-by: Allen Porter <allen@thebends.org>
2024-12-31 21:52:52 +01:00
jbouwh
e2d9ca9cd9 Allow a climate entity to have an independent on / off state attribute 2024-12-30 21:57:19 +00:00
Dan Raper
a0fb6df5ba Add battery sensor to ohme (#134222)
* Add battery sensor to ohme

* Forgot the snapshots!

* Add translation key to battery

* Change car to vehicle and fix snapshot tests

* Fix snapshot again - not sure what was going on with my local dev env
2024-12-30 20:15:11 +01:00
Bram Kragten
04020d5a56 Update frontend to 20241230.0 (#134284) 2024-12-30 20:04:50 +01:00
Norbert Rittel
f785b17314 Fix two descriptions of yeelight actions (#134282) 2024-12-30 19:22:12 +01:00
Andrew Jackson
6631c57cfb Bump aiomealie to 0.9.5 (#134274) 2024-12-30 17:47:58 +01:00
Norbert Rittel
bc76dc3c34 Remove excessive period at end of action name (#134272) 2024-12-30 16:22:30 +01:00
Ludovic BOUÉ
ea4931ca3a Bump Python Matter server to 7.0.0 (Matter 1.4) (#132502)
* Matter 1.4 rename BridgedDevice device type

BREAKING change in the client: BridgedDevice is renamed to BridgedNode in the device types with Matter 1.4

* `ColorMode` enum type is renamed to `ColorModeEnum`

* Item `ColorTemperature` renamed to `ColorTemperatureMireds`

* Update ColorControl bitmaps and attributes

* Bump Python Matter server to 7.0.0 (Matter 1.4)

* Bump requirements to Python Matter server to 7.0.0
2024-12-30 15:41:14 +01:00
Arne Keller
dd20204bf0 ollama: update to 0.4.5 (#134265) 2024-12-30 14:42:46 +01:00
Norbert Rittel
ef46c62bc6 Make triggers and condition for monetary sensor consistent (#131184) 2024-12-30 13:47:16 +01:00
Alberto Geniola
2bb6e03a36 Bump elmax-api (#133845) 2024-12-30 13:46:53 +01:00
G Johansson
2288f89415 Fix duplicate sensor disk entities in Systemmonitor (#134139) 2024-12-30 13:38:48 +01:00
Josef Zweck
e7ab5afc14 Bump pylamarzocco to 1.4.5 (#134259)
* Bump pylamarzocco to 1.4.4

* Bump pylamarzocco to 1.4.5

---------

Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-12-30 12:27:32 +01:00
Adam Goode
4db88dfaff Quickly process unavailable metrics in Prometheus (#133219) 2024-12-30 12:05:33 +01:00
Joost Lekkerkerker
906c95048c Record LG WebOS TV Quality scale (#133732)
Co-authored-by: Shay Levy <levyshay1@gmail.com>
2024-12-30 11:45:44 +01:00
Manu
df38c1b1d7 Remove deprecated yaml import from OTP integration (#134196) 2024-12-30 11:12:16 +01:00
tronikos
af97bf1c5f Fix 400 "This voice does not support speaking rate or pitch parameters at this time" for Google Cloud Journey voices (#134255) 2024-12-30 09:20:35 +01:00
tronikos
a7c2d96ecf Avoid KeyError for ignored entries in async_step_zeroconf of Android TV Remote (#134250) 2024-12-30 10:13:51 +02:00
Noah Husby
1b06b4e45b Remove unused translations from Russound RIO (#134246) 2024-12-30 10:11:37 +02:00
Manu
b74b9bc360 Bump habiticalib to v0.3.2 (#134244) 2024-12-30 10:10:18 +02:00
Brett Adams
810689ce66 Handle missing application credentials in Tesla Fleet (#134237)
* Handle missing application credentials

* Add tests

* Test reauth starts

* Only catch ValueError
2024-12-29 22:21:18 -08:00
G Johansson
249d93574a Set Scrape sensor unavailable when errors (#134143) 2024-12-29 22:59:57 +01:00
Michael
e2c59f276a Bump aiopegelonline to 0.1.1 (#134230)
bump aiopegelonline to 0.1.1
2024-12-29 21:36:49 +01:00
Manu
9804e8aa98 Add reauth flow to Habitica integration (#131676)
* Add reauth flow to Habitica integration

* tests, invalid_credentials string

* test only api_key

* section consts

* test config entry

* test reauth is triggered

* set reauthentication-flow to done

* use consts in tests

* reauth_entry

* changes

* fix import

* changes
2024-12-29 21:12:36 +01:00
Paul Daumlechner
53e69af088 Bump pyvlx to 0.2.26 (#115483) 2024-12-29 10:00:26 -10:00
tronikos
1530edbe20 Bump opower to 0.8.7 (#134228)
* Bump opower to 0.8.7

* update deps
2024-12-29 11:44:33 -08:00
Paulus Schoutsen
7dbf32d693 Bump frontend to 20241229.0 (#134225) 2024-12-29 13:35:46 -05:00
Michael Hansen
49646ad994 Bump VoIP utils to 0.2.2 (#134219) 2024-12-29 11:56:27 -06:00
G Johansson
1e652db37f Use config entry runtime data in Open-Meteo (#134198) 2024-12-29 18:16:41 +01:00
Dan Raper
88d366b0c5 Add slot list service to ohme (#134170)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-12-29 18:07:12 +01:00
Lucas Gasenzer
65147f8d4c Fix Wake on LAN Port input as Box instead of Slider (#134216) 2024-12-29 18:03:41 +01:00
Simone Chemelli
52b919101a Bump aiocomelit to 0.10.1 (#134214) 2024-12-29 17:30:52 +01:00
Aaron Bach
24fd74d839 Change SimpliSafe websocket reconnection log to DEBUG-level (#134063)
* Change SimpliSafe websocket reconnection log to `DEBUG`-level

* revert
2024-12-29 11:23:44 -05:00
Marc Mueller
2599faa622 Fix method subtyping [helpers] (#134213) 2024-12-29 17:16:38 +01:00
Marc Mueller
3df91cfba5 Fix method subtyping [recorder] (#134212) 2024-12-29 17:16:11 +01:00
Marc Mueller
d3fab42c85 Fix method subtyping [knx] (#134211) 2024-12-29 16:41:23 +01:00
Marc Mueller
beb881492a Fix method subtyping [elkm1] (#134210) 2024-12-29 16:40:51 +01:00
Matthias Alphart
9d7c7f9fcf Update knx-frontend to 2024.12.26.233449 (#134184) 2024-12-29 16:39:37 +01:00
Shay Levy
419307a7c4 Bump aioswitcher to 6.0.0 (#134185) 2024-12-29 15:42:33 +01:00
G Johansson
409dc4ad48 Move coordinator to own file in Open-Meteo (#134197) 2024-12-29 15:25:40 +01:00
Michael
7704ef95a4 Make feedreader recoverable (#134202)
raise ConfigEntryNotReady on connection errors during setup
2024-12-29 15:08:15 +01:00
Manu
0db07a033b Migrate Habitica integration to habiticalib (#131032)
* Migrate data to habiticalib

* Add habiticalib to init and coordinator

* Migrate Habitica config flow to habiticalib

* migrate init to habiticalib

* migrate buttons to habiticalib

* migrate switch to habiticalib

* update habiticalib

* cast_skill action

* migrate update_score

* migrate transformation items action

* migrate quest actions

* fix fixture errors

* Migrate coordinator data and content

* bump habiticalib

* Remove habitipy and use wrapper in habiticalib

* changes

* some fixes

* minor refactoring

* class_needed annotation

* Update diagnostics

* do integration setup in coordinator setup

* small changes

* raise HomeAssistantError for TooManyRequestsError

* fix docstring

* update tests

* changes to tests/snapshots

* fix update_todo_item
2024-12-29 15:00:31 +01:00
Joost Lekkerkerker
4717eb3142 Bump python-overseerr to 0.4.0 (#134192) 2024-12-29 15:46:30 +02:00
Joost Lekkerkerker
c23f5c9f2c Make elevenlabs recoverable (#134094)
* Make elevenlabs recoverable

* Add tests for entry setup

* Use the same fixtures for setup and config flow

* Update tests/components/elevenlabs/test_setup.py

Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com>

---------

Co-authored-by: Simon Sorg <simon.sorg@student.hpi.de>
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
Co-authored-by: Simon <80467011+sorgfresser@users.noreply.github.com>
2024-12-29 14:26:59 +01:00
Michael
873b078bb3 Make PEGELONLINE recoverable (#134199) 2024-12-29 14:07:45 +01:00
Manu
0dd93a18c5 Add button platform to IronOS integration (#133678)
* Add button platform to IronOS integration

* Add tests

* load platform

* refactor

* update tests
2024-12-29 12:39:13 +01:00
Maikel Punie
da96e2077b Add Velbus Button tests (#134186)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-12-29 11:55:52 +01:00
Manu
1d69cf11a5 Bump pynecil to v3.0.1 (#134174) 2024-12-29 10:06:29 +02:00
Manu
adb1fbbbc4 Add switch platform to IronOS integration (#133691)
* Add switch platform

* Add tests

* prevent switch bouncing

* some changes

* icons

* update tests

* changes
2024-12-28 21:59:06 +01:00
G Johansson
645f2e44b9 Fix Nord Pool empty response (#134033)
* Fix Nord Pool empty response

* Mods

* reset validate prices
2024-12-28 21:38:04 +01:00
Artur Pragacz
b3aede611a Fix Onkyo volume rounding (#134157) 2024-12-28 21:34:01 +01:00
jb101010-2
72a96249b1 Suez_water: clear quality scale (#134027)
* Suez_water: clear quality scale

Revert invalid done rules and mark inapplicable ones as exempted.

* Mark entity disabled as todo

* Mark devices as todo

* missing push

* Update homeassistant/components/suez_water/quality_scale.yaml

Co-authored-by: Josef Zweck <josef@zweck.dev>

* Update quality_scale.yaml

* Update quality_scale.yaml again

---------

Co-authored-by: Josef Zweck <josef@zweck.dev>
2024-12-28 20:59:11 +01:00
Joost Lekkerkerker
80dbce14ec Add binary sensor to Tile (#134153) 2024-12-28 16:49:14 +01:00
Manu
0376f75ee3 Bump pynecil to v3.0.0 (#134151) 2024-12-28 16:48:28 +01:00
jb101010-2
e58bd62c68 Suez_water: use meter id as unique_id (#133959)
* Suez_water: use meter id as unique_id

* Review fixes

* No more afraid check :)

* review again

* Apply suggestions from code review

---------

Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2024-12-28 16:25:10 +01:00
Matthias Alphart
6dbcd130b0 Add quality_scale.yaml for KNX (#133937)
* Add quality_scale.yaml

* Update quality_scale.yaml
2024-12-28 16:24:49 +01:00
Andrew Jackson
4639f57014 Remove deprecated Mastodon yaml config import (#134040)
* Remove Mastodon yaml import

* Revert removal of async_migrate_entry
2024-12-28 16:22:32 +01:00
G Johansson
4080455c12 Use x,y in roborock action call (#134133)
* Use x,y in roborock action call

* Fix description
2024-12-28 16:12:09 +01:00
Joost Lekkerkerker
df7d518f38 Add versions to Tile device (#134150)
* Add versions to Tile device

* Add versions to Tile device
2024-12-28 16:04:36 +01:00
Joost Lekkerkerker
47adfb574f Bump python-overseerr to 0.3.0 (#134147)
Bump Overseerr to 0.3.0
2024-12-28 15:44:15 +01:00
Joost Lekkerkerker
4c5d0c2ec4 Add Tile device tracker tests (#134137) 2024-12-28 15:36:56 +01:00
G Johansson
4febe43021 Add missing device classes in scrape (#134141) 2024-12-28 15:36:23 +01:00
Maikel Punie
af13979855 Add Velbus binary sensor tests (#134132)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2024-12-28 14:57:48 +01:00
Marc Mueller
d9f2140df3 Add ClassVar annotation for singleton patterns (#134135) 2024-12-28 13:17:15 +01:00
Joost Lekkerkerker
cc80108629 Bump yt-dlp to 2024.12.23 (#134131) 2024-12-28 13:13:07 +01:00
Joost Lekkerkerker
16af76b968 Add Tile device tests (#134138) 2024-12-28 13:10:13 +01:00
Joost Lekkerkerker
590f0ce61f Refactor Tile tests (#134130) 2024-12-28 12:37:21 +01:00
Allen Porter
14059c6df8 Remove unused parameters from function calls in rainbird (#134124)
Remove unused parameters from rainbird function calls
2024-12-28 11:34:27 +00:00
Joost Lekkerkerker
268c21addd Add Overseerr integration (#133981)
* Add Overseerr integration

* Add Overseerr integration

* Fix

* Fix

* Fix

* Fix

* Fix

* Fix

* Fix
2024-12-28 11:50:36 +01:00
Andre Lengwenus
565fa4ea1f Remove incorrect device check in LCN events (#134116) 2024-12-28 09:26:49 +01:00
Raj Laud
28cd7f2473 Bump pysqueezebox to v0.11.1 (#134097) 2024-12-28 09:24:22 +01:00
Noah Husby
aceb1b39ba Add mute support to Russound RIO (#134118) 2024-12-28 09:22:13 +01:00
Allen Porter
6edf06f8a4 Converge stream av open methods, options, and error handling (#134020)
* Converge stream av open methods, options, and error handling

* Remove exception that is never thrown

* Update exceptions thrown in generic tests

* Increase stream test coverage
2024-12-27 18:47:33 -08:00
Noah Husby
07ae9b15d0 Bump aiorussound to 4.2.0 (#134117) 2024-12-27 18:23:57 -08:00
G Johansson
d676169b04 Cleanup devices in Nord Pool from reconfiguration (#134043)
* Cleanup devices in Nord Pool from reconfiguration

* Mods

* Mod
2024-12-27 21:33:37 +01:00
Noah Husby
24ce3d7daa Remove deprecated yaml import for Russound RIO (#134072) 2024-12-27 21:27:33 +01:00
Joost Lekkerkerker
417e736746 Migrate Tile to use entry.runtime_data (#134107) 2024-12-27 21:25:36 +01:00
Cyrill Raccaud
bb8d4ca255 Add unit test for sensors in swiss public transport (#134115)
* add unit test for sensors

* clean up
2024-12-27 21:21:45 +01:00
Joost Lekkerkerker
375af6cb1c Introduce base entity for Tile (#134109) 2024-12-27 21:18:01 +01:00
Jan Bouwhuis
263e0acd3a Set PARALLEL_UPDATES for incomfort entity platforms (#134110) 2024-12-27 20:43:30 +01:00
Erwin Douna
da531d0e4e Bump Tado to 0.18.5 (#133988) 2024-12-27 20:26:19 +01:00
Joost Lekkerkerker
844e36c8fe Bump python-homeassistant-analytics to 0.8.1 (#134101) 2024-12-27 20:21:12 +01:00
Joost Lekkerkerker
9976c07f89 Remove YAML import from Tile (#134108) 2024-12-27 20:15:48 +01:00
Aaron Bach
7df9d2e938 Bump pytile to 2024.12.0 (#134103) 2024-12-27 20:04:35 +01:00
Joost Lekkerkerker
52318f5f37 Extract Tile coordinator in separate file (#134104) 2024-12-27 19:30:13 +01:00
Joost Lekkerkerker
b9c2b3f7e3 Remove Tile unique id migration (#134106) 2024-12-27 19:25:10 +01:00
Andrew Sayre
a9ff5b8007 Bump pyheos to v0.8.0 (#134069)
Bump pyheos and update usage
2024-12-27 11:01:35 -06:00
Joost Lekkerkerker
7076ba7c9d Make google tasks recoverable (#134092) 2024-12-27 08:52:33 -08:00
Josef Zweck
5e0088feaa Add azure_data_explorer to microsoft brand (#134088) 2024-12-27 15:36:07 +01:00
Franck Nijhof
f8399b2c0f Revert "Add state_class to EcoWittSensorTypes.DEGREE" (#134079) 2024-12-27 13:17:47 +01:00
Matthias Alphart
415fdf4956 Fix KNX config flow translations and add data descriptions (#134078)
* Fix KNX config flow translations and add data descriptions

* Update strings.json

* typo
2024-12-27 12:59:52 +01:00
Noah Husby
ad89004189 Remove timeout from Russound RIO initialization (#134070) 2024-12-27 11:01:10 +01:00
Noah Husby
b6afbe4b29 Bump aiorussound to 4.1.1 (#134058)
* Bump aiorussound to 4.1.1

* Trigger Build

* Trigger Build
2024-12-26 22:03:50 -06:00
Cyrill Raccaud
402340955e Fix swiss public transport line field none (#133964)
* fix #133116

The line can theoretically be None when no line info is available (let's say walking sections first?)

* fix line field

* add unit test with missing line field
2024-12-27 00:24:47 +01:00
Raphael Hehl
b2a160d926 Roborock Add vacuum_goto service (#133994)
* Roborock Add vacuum_goto service to control vacuum movement to specified coordinates

* roborock Add type specification for x_coord and y_coord in vacuum_goto service

* roborock Add get_current_position service to retrieve vacuum's current coordinates

* Rename vacuum services for clarity and consistency

* Apply suggestions from code review

Co-authored-by: G Johansson <goran.johansson@shiftit.se>

* Add integration field to vacuum service targets for Roborock

---------

Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2024-12-27 00:20:09 +01:00
Thomas Kunzfeld
9840785363 Add state_class to EcoWittSensorTypes.DEGREE (#134004)
Add state_class to EcoWittSensorTypes.DEGREE (#129260)
2024-12-27 00:12:54 +01:00
jb101010-2
a53c92d4b5 Suez_water: remove redundant log on refresh failure (#134025)
Suez_water: remove redundant log on refresh failure
2024-12-27 00:05:28 +01:00
Marc Mueller
adc97b6c15 Fix unifiprotect DeprecationWarnings in tests (#134060) 2024-12-26 23:50:03 +01:00
Jan Bouwhuis
7b2a5d0684 Remove mqtt publish templates after 6 months of deprecation (#134056) 2024-12-26 23:25:44 +01:00
Diogo Gomes
acb511d395 Bump pyipma to 3.0.8 (#134055)
bump pyipma
2024-12-26 21:01:53 +00:00
Norbert Rittel
c025390c6c Replace "service" with "action" plus fixed descriptions (#134053) 2024-12-26 15:39:18 -05:00
J. Nick Koston
942fbdedcf Ensure all states have been migrated to use timestamps (#134007) 2024-12-26 07:48:55 -10:00
Allen Porter
3bfb6707e9 Fix Nest ConfigEntry typing (#134021) 2024-12-26 09:27:20 -08:00
Norbert Rittel
5172139579 Use correct uppercase for abbreviations (#134028)
Fix the spelling of "SSDP" and "MAC" (address) to ensure proper translations.
2024-12-26 11:09:30 +01:00
Norbert Rittel
cfb43c7b58 Fix typo in get_command action description (#134026) 2024-12-26 09:56:08 +01:00
Allen Porter
45657ece7c Improve Google Tasks error messages (#134023) 2024-12-26 09:53:20 +01:00
Erwin Douna
f7fe2f2122 Tado update code owners (#133987)
Update code owners
2024-12-26 09:13:24 +01:00
Allen Porter
c75222e63c Bump python-google-nest-sdm to 7.0.0 (#134016)
Update python-google-nest-sdm to 7.0.0
2024-12-26 00:03:44 -05:00
Brett Adams
299250ebec Bump Tesla Fleet API library (#134019)
Bump Tesla Fleet
2024-12-25 23:26:55 -05:00
Josef Zweck
ed8e242049 Bump pylamarzocco to 1.4.3 (#134008) 2024-12-25 16:25:13 -08:00
Cyrill Raccaud
95e4a40ad5 Update silver docs for swiss public transport (#134001)
update docs
2024-12-25 21:36:30 +01:00
Christopher Fenner
e61717ce7a Fulfill IQS rule docs-removal-instructions in ViCare integration (#133982)
update iqs state
2024-12-25 15:30:33 +01:00
Cyrill Raccaud
73b6bd8bd3 Add config flow data description to swiss public transport (#133997)
* add config flow data description

* improve strings
2024-12-25 15:20:09 +01:00
Cyrill Raccaud
60774c69cd Add clear shopping list button for Cookidoo (#133583)
* add clear button

* set clear button to disabled per default

* add actions exception
2024-12-25 14:58:19 +01:00
Cyrill Raccaud
c383b41a12 Add parallel updates to swiss public transport (#133996)
add parallel updates
2024-12-25 14:55:34 +01:00
J. Nick Koston
05a8b773b9 Bump numpy to 2.2.1 (#133844)
* Bump numpy to 2.2.1

changelog: https://github.com/numpy/numpy/compare/v2.2.0...v2.2.1

* make sure ninja is up to date

* Revert "make sure ninja is up to date"

This reverts commit a26dd8b768.

* test

* Revert "test"

This reverts commit 972f40e3ee.

* try a single build

* try a single build

* Revert "Revert "test""

This reverts commit ec282ce021.

* Revert "Revert "Revert "test"""

This reverts commit 315599cbae.

* Revert "try a single build"

This reverts commit 63529dd2c5.

* Revert "try a single build"

This reverts commit 7058ae9288.
2024-12-25 11:27:00 +02:00
G-Two
1bee423c22 Bump subarulink to 0.7.13 (#133970) 2024-12-25 10:13:04 +02:00
Marc Mueller
687afd23bc Add pip wheel build constraints to fix numpy builds (#133962) 2024-12-24 15:06:21 -10:00
cdnninja
0020c48a15 Update pyvesync version (#131433) 2024-12-24 17:51:40 +01:00
Bram Kragten
760cbcc596 Update frontend to 20241224.0 (#133963) 2024-12-24 16:41:36 +01:00
Philipp Danner
da8f4e5b57 fix "Slow" response leads to "Could not find a charging station" #124129 (#133889)
fix #124129
2024-12-24 14:00:34 +01:00
Claudio Ruggeri - CR-Tech
5c0659c8df Fix reload modbus component issue (#133820)
fix issue 116675
2024-12-24 13:57:18 +01:00
Marc Mueller
15806c2af6 Update Jinja2 to 3.1.5 (#133951) 2024-12-24 13:44:09 +01:00
Maikel Punie
97d8d16cc5 Bump velbusaio to 2024.12.3 (#133939) 2024-12-24 12:35:22 +02:00
Khole
33435fa36f Hive: Fix error when device goes offline (#133848) 2024-12-24 10:42:35 +01:00
Joost Lekkerkerker
6fc1cfded9 Use SignedSession in Xbox (#133938) 2024-12-24 10:17:02 +01:00
Franck Nijhof
a9d6a42781 Update apprise to v1.9.1 (#133936) 2024-12-24 10:15:21 +01:00
Kevin Worrel
f2a706ecf7 Make screenlogic state enums lowercase (#133866) 2024-12-24 09:12:18 +01:00
G-Two
4a2ae7f6fd Stop using shared aiohttp client session for Subaru integration (#133931) 2024-12-24 08:59:51 +01:00
Franck Nijhof
771ead9d7b Prevent imports from tests in core codebase (#133928)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2024-12-24 08:40:05 +01:00
Jordi
2d5e2aa4b4 Add Harvey virtual integration (#133874)
Add harvey virtual integration
2024-12-24 08:01:50 +01:00
Franck Nijhof
6f11524b84 Remove myself as codeowner from Tuya integration (#133921) 2024-12-24 07:55:44 +01:00
Dave T
561f319e3b Fix missing % in string for generic camera (#133925)
Fix missing % in generic camera string
2024-12-24 07:45:13 +01:00
Franck Nijhof
0c9ec4b699 Fix Peblar import in data coordinator (#133926) 2024-12-24 07:42:48 +01:00
Brett Adams
cbb2930805 Slow down polling in Teslemetry (#133924) 2024-12-24 01:59:36 +01:00
Franck Nijhof
aa29a93fbe Remove myself as codeowner from Plugwise (#133920) 2024-12-24 01:34:23 +01:00
J. Nick Koston
ff4ba553c4 Sort integration platforms preload list (#133905)
* Sort integration platforms preload list

https://github.com/home-assistant/core/pull/133856#discussion_r1895385026

* sort

* Sort them all

---------

Co-authored-by: Franck Nijhof <frenck@frenck.nl>
2024-12-23 23:38:59 +01:00
Thomas55555
2f101c5054 Catch ClientConnectorError and TimeOutError in APSystems (#132027) 2024-12-23 22:49:59 +01:00
karwosts
72e2b835d9 Fix a history stats bug when window and tracked state change simultaneously (#133770) 2024-12-23 22:47:26 +01:00
Martin Mrazik
8f6e4cd294 Map RGB+CCT to RGB for WLED (#133900) 2024-12-23 22:26:38 +01:00
Mick Vleeshouwer
bd0edd4996 Revise codeowners for Overkiz (#133784) 2024-12-23 22:24:22 +01:00
J. Nick Koston
3f441e7090 Ensure cloud and recorder backup platforms do not have to wait for the import executor (#133907)
* Ensure cloud and recorder backup platforms do not have to wait for the import executor

partially fixes #133904

* backup.backup as well
2024-12-23 22:19:28 +01:00
Abílio Costa
253098d79c Mark missing IQS requirements for Idasen Desk as done (#133910) 2024-12-23 21:38:27 +01:00
Abílio Costa
53ebf84339 Add cronsim to default dependencies (#133913) 2024-12-23 21:34:36 +01:00
J. Nick Koston
7cfbc3eeae Fix duplicate call to async_register_preload_platform (#133909) 2024-12-23 09:20:44 -10:00
Franck Nijhof
8d32531bc1 Bump version to 2025.2.0dev0 (#133893) 2024-12-23 17:54:32 +01:00
Simon
30d95f37d8 Add removal instructions to ElevenLabs (#133895) 2024-12-23 18:37:19 +02:00
356 changed files with 11575 additions and 4538 deletions

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 11
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.1"
HA_SHORT_VERSION: "2025.2"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
# 10.3 is the oldest supported version

View File

@@ -76,8 +76,20 @@ jobs:
# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"
# Add additional pip wheel build constraints
echo "PIP_CONSTRAINT=build_constraints.txt"
) > .env_file
- name: Write pip wheel build constraints
run: |
(
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
# this caused the numpy builds to fail
# https://github.com/scikit-build/ninja-python-distributions/issues/274
echo "ninja==1.11.1.1"
) > build_constraints.txt
- name: Upload env_file
uses: actions/upload-artifact@v4.5.0
with:
@@ -86,6 +98,13 @@ jobs:
include-hidden-files: true
overwrite: true
- name: Upload build_constraints
uses: actions/upload-artifact@v4.5.0
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.5.0
with:
@@ -123,6 +142,11 @@ jobs:
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
with:
@@ -167,6 +191,11 @@ jobs:
with:
name: env_file
- name: Download build_constraints
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
with:

View File

@@ -362,6 +362,7 @@ homeassistant.components.openuv.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.panel_custom.*
homeassistant.components.peblar.*

View File

@@ -1103,8 +1103,10 @@ build.json @home-assistant/supervisor
/tests/components/otbr/ @home-assistant/core
/homeassistant/components/ourgroceries/ @OnFreund
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/homeassistant/components/overkiz/ @imicknl
/tests/components/overkiz/ @imicknl
/homeassistant/components/overseerr/ @joostlek
/tests/components/overseerr/ @joostlek
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1135,8 +1137,8 @@ build.json @home-assistant/supervisor
/tests/components/plaato/ @JohNan
/homeassistant/components/plex/ @jjlawren
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
@@ -1478,8 +1480,8 @@ build.json @home-assistant/supervisor
/tests/components/system_bridge/ @timmo001
/homeassistant/components/systemmonitor/ @gjohansson-ST
/tests/components/systemmonitor/ @gjohansson-ST
/homeassistant/components/tado/ @chiefdragon @erwindouna
/tests/components/tado/ @chiefdragon @erwindouna
/homeassistant/components/tado/ @erwindouna
/tests/components/tado/ @erwindouna
/homeassistant/components/tag/ @balloob @dmulcahey
/tests/components/tag/ @balloob @dmulcahey
/homeassistant/components/tailscale/ @frenck
@@ -1573,8 +1575,8 @@ build.json @home-assistant/supervisor
/tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
/tests/components/tuya/ @Tuya @zlinoliver @frenck
/homeassistant/components/tuya/ @Tuya @zlinoliver
/tests/components/tuya/ @Tuya @zlinoliver
/homeassistant/components/twentemilieu/ @frenck
/tests/components/twentemilieu/ @frenck
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen

View File

@@ -2,6 +2,7 @@
"domain": "microsoft",
"name": "Microsoft",
"integrations": [
"azure_data_explorer",
"azure_devops",
"azure_event_hub",
"azure_service_bus",

View File

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["python_homeassistant_analytics"],
"requirements": ["python-homeassistant-analytics==0.8.0"],
"requirements": ["python-homeassistant-analytics==0.8.1"],
"single_config_entry": true
}

View File

@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
# and one of them, which could end up being in discovery_info.host, is from a
# different device. If any of the discovery_info.ip_addresses matches the
# existing host, don't update the host.
if existing_config_entry and len(discovery_info.ip_addresses) > 1:
if (
existing_config_entry
# Ignored entries don't have host
and CONF_HOST in existing_config_entry.data
and len(discovery_info.ip_addresses) > 1
):
existing_host = existing_config_entry.data[CONF_HOST]
if existing_host != self.host:
if existing_host in [

View File

@@ -6,5 +6,5 @@
"iot_class": "cloud_push",
"loggers": ["apprise"],
"quality_scale": "legacy",
"requirements": ["apprise==1.9.0"]
"requirements": ["apprise==1.9.1"]
}

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from aiohttp import ClientConnectorError
from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
from homeassistant.const import UnitOfPower
from homeassistant.core import HomeAssistant
@@ -45,7 +47,13 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
async def async_update(self) -> None:
"""Set the state with the value fetched from the inverter."""
self._attr_native_value = await self._api.get_max_power()
try:
status = await self._api.get_max_power()
except (TimeoutError, ClientConnectorError):
self._attr_available = False
else:
self._attr_available = True
self._attr_native_value = status
async def async_set_native_value(self, value: float) -> None:
"""Set the desired output power."""

View File

@@ -5,6 +5,10 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType
# Pre-import backup to avoid it being imported
# later when the import executor is busy and delaying
# startup
from . import backup # noqa: F401
from .agent import (
BackupAgent,
BackupAgentError,

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import configparser
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, ClassVar
from urllib.parse import urlparse
import aiohttp
@@ -129,7 +129,7 @@ class ChromecastInfo:
class ChromeCastZeroconf:
"""Class to hold a zeroconf instance."""
__zconf: zeroconf.HaZeroconf | None = None
__zconf: ClassVar[zeroconf.HaZeroconf | None] = None
@classmethod
def set_zeroconf(cls, zconf: zeroconf.HaZeroconf) -> None:
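
A minimal sketch of the pattern this change annotates, using a made-up Holder class rather than the cast code: ClassVar marks the attribute as state shared by the whole class, so type checkers treat it as belonging to the class object and flag accidental per-instance assignments.
from typing import ClassVar


class Holder:
    """Hypothetical singleton-style holder, analogous to ChromeCastZeroconf."""

    _shared: ClassVar[int | None] = None  # one value shared by the whole class

    @classmethod
    def set_shared(cls, value: int) -> None:
        """Store the value on the class itself, never on an instance."""
        cls._shared = value

    @classmethod
    def get_shared(cls) -> int | None:
        """Return the shared value."""
        return cls._shared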

View File

@@ -240,6 +240,7 @@ CACHED_PROPERTIES_WITH_ATTR_ = {
"preset_mode",
"preset_modes",
"is_aux_heat",
"is_on",
"fan_mode",
"fan_modes",
"swing_mode",
@@ -280,6 +281,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
_attr_hvac_mode: HVACMode | None
_attr_hvac_modes: list[HVACMode]
_attr_is_aux_heat: bool | None
_attr_is_on: bool | None
_attr_max_humidity: float = DEFAULT_MAX_HUMIDITY
_attr_max_temp: float
_attr_min_humidity: float = DEFAULT_MIN_HUMIDITY
@@ -352,11 +354,33 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
hvac_mode = self.hvac_mode
if hvac_mode is None:
return None
if hasattr(self, "_attr_is_on") and self._attr_is_on is False:
return HVACMode.OFF.value
# Support hvac_mode as string for custom integration backwards compatibility
if not isinstance(hvac_mode, HVACMode):
return HVACMode(hvac_mode).value # type: ignore[unreachable]
return hvac_mode.value
@property
def is_on(self) -> bool | None:
"""Return True if the climate is turned on.
The climate's on/off state can be controlled independently
from the hvac_action and hvac_mode if the _attr_is_on attribute is set.
If the _attr_is_on attribute is set, then return that value.
Otherwise, if hvac_action is set, return True unless it is HVACAction.OFF.
If hvac_mode is also unset, return None;
otherwise return True unless hvac_mode is HVACMode.OFF.
"""
if hasattr(self, "_attr_is_on"):
return self._attr_is_on
if self.hvac_action is not None:
return self.hvac_action != HVACAction.OFF
if self.hvac_mode is None:
return None
return self.hvac_mode != HVACMode.OFF
@property
def precision(self) -> float:
"""Return the precision of the system."""

View File

@@ -36,7 +36,14 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util.signal_type import SignalType
from . import account_link, http_api
# Pre-import backup to avoid it being imported
# later when the import executor is busy and delaying
# startup
from . import (
account_link,
backup, # noqa: F401
http_api,
)
from .client import CloudClient
from .const import (
CONF_ACCOUNT_LINK_SERVER,

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"requirements": ["aiocomelit==0.9.1"]
"requirements": ["aiocomelit==0.10.1"]
}

View File

@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/compensation",
"iot_class": "calculated",
"quality_scale": "legacy",
"requirements": ["numpy==2.2.0"]
"requirements": ["numpy==2.2.1"]
}

View File

@@ -16,7 +16,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.TODO]
PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:

View File

@@ -0,0 +1,70 @@
"""Support for Cookidoo buttons."""
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from cookidoo_api import Cookidoo, CookidooException
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
from .entity import CookidooBaseEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class CookidooButtonEntityDescription(ButtonEntityDescription):
"""Describes cookidoo button entity."""
press_fn: Callable[[Cookidoo], Awaitable[None]]
TODO_CLEAR = CookidooButtonEntityDescription(
key="todo_clear",
translation_key="todo_clear",
press_fn=lambda client: client.clear_shopping_list(),
entity_registry_enabled_default=False,
)
async def async_setup_entry(
hass: HomeAssistant,
entry: CookidooConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Cookidoo button entities based on a config entry."""
coordinator = entry.runtime_data
async_add_entities([CookidooButton(coordinator, TODO_CLEAR)])
class CookidooButton(CookidooBaseEntity, ButtonEntity):
"""Defines an Cookidoo button."""
entity_description: CookidooButtonEntityDescription
def __init__(
self,
coordinator: CookidooDataUpdateCoordinator,
description: CookidooButtonEntityDescription,
) -> None:
"""Initialize cookidoo button."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}"
async def async_press(self) -> None:
"""Press the button."""
try:
await self.entity_description.press_fn(self.coordinator.cookidoo)
except CookidooException as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="button_clear_todo_failed",
) from e
await self.coordinator.async_refresh()

View File

@@ -1,5 +1,10 @@
{
"entity": {
"button": {
"todo_clear": {
"default": "mdi:cart-off"
}
},
"todo": {
"ingredient_list": {
"default": "mdi:cart-plus"

View File

@@ -48,6 +48,11 @@
}
},
"entity": {
"button": {
"todo_clear": {
"name": "Clear shopping list and additional purchases"
}
},
"todo": {
"ingredient_list": {
"name": "Shopping list"
@@ -58,6 +63,9 @@
}
},
"exceptions": {
"button_clear_todo_failed": {
"message": "Failed to clear all items from the Cookidoo shopping list"
},
"todo_save_item_failed": {
"message": "Failed to save {name} to Cookidoo shopping list"
},

View File

@@ -50,7 +50,7 @@
"services": {
"get_command": {
"name": "Get command",
"description": "Send sa generic HTTP get command.",
"description": "Sends a generic HTTP get command.",
"fields": {
"command": {
"name": "Command",

View File

@@ -57,11 +57,11 @@
"services": {
"get_gas_prices": {
"name": "Get gas prices",
"description": "Request gas prices from easyEnergy.",
"description": "Requests gas prices from easyEnergy.",
"fields": {
"config_entry": {
"name": "Config Entry",
"description": "The config entry to use for this service."
"description": "The configuration entry to use for this action."
},
"incl_vat": {
"name": "VAT Included",
@@ -79,7 +79,7 @@
},
"get_energy_usage_prices": {
"name": "Get energy usage prices",
"description": "Request usage energy prices from easyEnergy.",
"description": "Requests usage energy prices from easyEnergy.",
"fields": {
"config_entry": {
"name": "[%key:component::easyenergy::services::get_gas_prices::fields::config_entry::name%]",
@@ -101,7 +101,7 @@
},
"get_energy_return_prices": {
"name": "Get energy return prices",
"description": "Request return energy prices from easyEnergy.",
"description": "Requests return energy prices from easyEnergy.",
"fields": {
"config_entry": {
"name": "[%key:component::easyenergy::services::get_gas_prices::fields::config_entry::name%]",

View File

@@ -6,11 +6,16 @@ from dataclasses import dataclass
from elevenlabs import AsyncElevenLabs, Model
from elevenlabs.core import ApiError
from httpx import ConnectError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.httpx_client import get_async_client
from .const import CONF_MODEL
@@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
model_id = entry.options[CONF_MODEL]
try:
model = await get_model_by_id(client, model_id)
except ConnectError as err:
raise ConfigEntryNotReady("Failed to connect") from err
except ApiError as err:
raise ConfigEntryAuthFailed("Auth failed") from err

View File

@@ -13,7 +13,7 @@ rules:
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: todo
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: >

View File

@@ -49,7 +49,7 @@ class ElkBinarySensor(ElkAttachedEntity, BinarySensorEntity):
_element: Zone
_attr_entity_registry_enabled_default = False
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
# Zone in NORMAL state is OFF; any other state is ON
self._attr_is_on = bool(
self._element.logical_status != ZoneLogicalStatus.NORMAL

View File

@@ -120,7 +120,7 @@ class ElkCounter(ElkSensor):
_attr_icon = "mdi:numeric"
_element: Counter
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
self._attr_native_value = self._element.value
@@ -153,7 +153,7 @@ class ElkKeypad(ElkSensor):
attrs["last_keypress"] = self._element.last_keypress
return attrs
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
self._attr_native_value = temperature_to_state(
self._element.temperature, UNDEFINED_TEMPERATURE
)
@@ -173,7 +173,7 @@ class ElkPanel(ElkSensor):
attrs["system_trouble_status"] = self._element.system_trouble_status
return attrs
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
if self._elk.is_connected():
self._attr_native_value = (
"Paused" if self._element.remote_programming_status else "Connected"
@@ -188,7 +188,7 @@ class ElkSetting(ElkSensor):
_attr_translation_key = "setting"
_element: Setting
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
self._attr_native_value = self._element.value
@property
@@ -257,7 +257,7 @@ class ElkZone(ElkSensor):
return UnitOfElectricPotential.VOLT
return None
def _element_changed(self, _: Element, changeset: Any) -> None:
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
if self._element.definition == ZoneType.TEMPERATURE:
self._attr_native_value = temperature_to_state(
self._element.temperature, UNDEFINED_TEMPERATURE

View File

@@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
port=self._panel_direct_port,
)
)
ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert)
ssl_context = await self.hass.async_add_executor_job(
build_direct_ssl_context, self._panel_direct_ssl_cert
)
# Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs.
client_api_url = get_direct_api_url(
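
The same pattern in a generic, self-contained sketch (the helper and its cadata argument are illustrative, not the Elmax API): blocking work such as building an SSLContext is handed to the executor so the event loop is not stalled.
import ssl

from homeassistant.core import HomeAssistant


def _build_context(cadata: str) -> ssl.SSLContext:
    """Blocking helper: constructing an SSLContext loads and parses certificates."""
    return ssl.create_default_context(cadata=cadata)


async def async_get_context(hass: HomeAssistant, cadata: str) -> ssl.SSLContext:
    """Build the context in the executor so the event loop stays responsive."""
    return await hass.async_add_executor_job(_build_context, cadata)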

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/elmax",
"iot_class": "cloud_polling",
"loggers": ["elmax_api"],
"requirements": ["elmax-api==0.0.6.3"],
"requirements": ["elmax-api==0.0.6.4rc0"],
"zeroconf": [
{
"type": "_elmax-ssl._tcp.local."

View File

@@ -14,6 +14,7 @@ import feedparser
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
@@ -101,7 +102,11 @@ class FeedReaderCoordinator(
async def async_setup(self) -> None:
"""Set up the feed manager."""
feed = await self._async_fetch_feed()
try:
feed = await self._async_fetch_feed()
except UpdateFailed as err:
raise ConfigEntryNotReady from err
self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
if feed_author := feed["feed"].get("author"):
self.feed_author = html.unescape(feed_author)

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20241223.1"]
"requirements": ["home-assistant-frontend==20241230.0"]
}

View File

@@ -255,10 +255,6 @@ async def async_test_and_preview_stream(
"""
if not (stream_source := info.get(CONF_STREAM_SOURCE)):
return None
# Import from stream.worker as stream cannot reexport from worker
# without forcing the av dependency on default_config
# pylint: disable-next=import-outside-toplevel
from homeassistant.components.stream.worker import StreamWorkerError
if not isinstance(stream_source, template_helper.Template):
stream_source = template_helper.Template(stream_source, hass)
@@ -294,8 +290,6 @@ async def async_test_and_preview_stream(
f"{DOMAIN}.test_stream",
)
hls_provider = stream.add_provider(HLS_PROVIDER)
except StreamWorkerError as err:
raise InvalidStreamException("unknown_with_details", str(err)) from err
except PermissionError as err:
raise InvalidStreamException("stream_not_permitted") from err
except OSError as err:

View File

@@ -77,7 +77,7 @@
},
"error": {
"unknown": "[%key:common::config_flow::error::unknown%]",
"unknown_with_details": "[%key:common::config_flow::error::unknown_with_details]",
"unknown_with_details": "[%key:component::generic::config::error::unknown_with_details%]",
"already_exists": "[%key:component::generic::config::error::already_exists%]",
"unable_still_load": "[%key:component::generic::config::error::unable_still_load%]",
"unable_still_load_auth": "[%key:component::generic::config::error::unable_still_load_auth%]",

View File

@@ -20,6 +20,10 @@ CONF_GAIN = "gain"
CONF_PROFILES = "profiles"
CONF_TEXT_TYPE = "text_type"
DEFAULT_SPEED = 1.0
DEFAULT_PITCH = 0
DEFAULT_GAIN = 0
# STT constants
CONF_STT_MODEL = "stt_model"

View File

@@ -31,7 +31,10 @@ from .const import (
CONF_SPEED,
CONF_TEXT_TYPE,
CONF_VOICE,
DEFAULT_GAIN,
DEFAULT_LANG,
DEFAULT_PITCH,
DEFAULT_SPEED,
)
DEFAULT_VOICE = ""
@@ -104,15 +107,15 @@ def tts_options_schema(
),
vol.Optional(
CONF_SPEED,
default=defaults.get(CONF_SPEED, 1.0),
default=defaults.get(CONF_SPEED, DEFAULT_SPEED),
): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)),
vol.Optional(
CONF_PITCH,
default=defaults.get(CONF_PITCH, 0),
default=defaults.get(CONF_PITCH, DEFAULT_PITCH),
): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)),
vol.Optional(
CONF_GAIN,
default=defaults.get(CONF_GAIN, 0),
default=defaults.get(CONF_GAIN, DEFAULT_GAIN),
): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)),
vol.Optional(
CONF_PROFILES,

View File

@@ -35,7 +35,10 @@ from .const import (
CONF_SPEED,
CONF_TEXT_TYPE,
CONF_VOICE,
DEFAULT_GAIN,
DEFAULT_LANG,
DEFAULT_PITCH,
DEFAULT_SPEED,
DOMAIN,
)
from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema
@@ -191,11 +194,23 @@ class BaseGoogleCloudProvider:
ssml_gender=gender,
name=voice,
),
# Avoid: "This voice does not support speaking rate or pitch parameters at this time."
# by not specifying the fields unless they differ from the defaults
audio_config=texttospeech.AudioConfig(
audio_encoding=encoding,
speaking_rate=options[CONF_SPEED],
pitch=options[CONF_PITCH],
volume_gain_db=options[CONF_GAIN],
speaking_rate=(
options[CONF_SPEED]
if options[CONF_SPEED] != DEFAULT_SPEED
else None
),
pitch=(
options[CONF_PITCH]
if options[CONF_PITCH] != DEFAULT_PITCH
else None
),
volume_gain_db=(
options[CONF_GAIN] if options[CONF_GAIN] != DEFAULT_GAIN else None
),
effects_profile_id=options[CONF_PROFILES],
),
)

View File

@@ -9,6 +9,7 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import Resource, build
from googleapiclient.errors import HttpError
from googleapiclient.http import BatchHttpRequest, HttpRequest
from httplib2 import ServerNotFoundError
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
@@ -115,7 +116,7 @@ class AsyncConfigEntryAuth:
def response_handler(_, response, exception: HttpError) -> None:
if exception is not None:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({exception.status_code})"
f"Google Tasks API responded with error ({exception.reason or exception.status_code})"
) from exception
if response:
data = json.loads(response)
@@ -150,9 +151,9 @@ class AsyncConfigEntryAuth:
async def _execute(self, request: HttpRequest | BatchHttpRequest) -> Any:
try:
result = await self._hass.async_add_executor_job(request.execute)
except HttpError as err:
except (HttpError, ServerNotFoundError) as err:
raise GoogleTasksApiError(
f"Google Tasks API responded with error ({err.status_code})"
f"Google Tasks API responded with: {err.reason or err.status_code})"
) from err
if result:
_raise_if_error(result)

View File

@@ -1,27 +1,15 @@
"""The habitica integration."""
from http import HTTPStatus
from aiohttp import ClientResponseError
from habitipy.aio import HabitipyAsync
from habiticalib import Habitica
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
APPLICATION_NAME,
CONF_API_KEY,
CONF_NAME,
CONF_URL,
CONF_VERIFY_SSL,
Platform,
__version__,
)
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import CONF_API_USER, DEVELOPER_ID, DOMAIN
from .const import CONF_API_USER, DOMAIN, X_CLIENT
from .coordinator import HabiticaDataUpdateCoordinator
from .services import async_setup_services
from .types import HabiticaConfigEntry
@@ -51,47 +39,17 @@ async def async_setup_entry(
) -> bool:
"""Set up habitica from a config entry."""
class HAHabitipyAsync(HabitipyAsync):
"""Closure API class to hold session."""
def __call__(self, **kwargs):
return super().__call__(websession, **kwargs)
def _make_headers(self) -> dict[str, str]:
headers = super()._make_headers()
headers.update(
{"x-client": f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"}
)
return headers
websession = async_get_clientsession(
session = async_get_clientsession(
hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True)
)
api = await hass.async_add_executor_job(
HAHabitipyAsync,
{
"url": config_entry.data[CONF_URL],
"login": config_entry.data[CONF_API_USER],
"password": config_entry.data[CONF_API_KEY],
},
api = Habitica(
session,
api_user=config_entry.data[CONF_API_USER],
api_key=config_entry.data[CONF_API_KEY],
url=config_entry.data[CONF_URL],
x_client=X_CLIENT,
)
try:
user = await api.user.get(userFields="profile")
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
raise ConfigEntryNotReady(e) from e
if not config_entry.data.get(CONF_NAME):
name = user["profile"]["name"]
hass.config_entries.async_update_entry(
config_entry,
data={**config_entry.data, CONF_NAME: name},
)
coordinator = HabiticaDataUpdateCoordinator(hass, api)
await coordinator.async_config_entry_first_refresh()

View File

@@ -5,7 +5,8 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from typing import Any
from habiticalib import UserData
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
@@ -23,8 +24,8 @@ from .types import HabiticaConfigEntry
class HabiticaBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Habitica Binary Sensor Description."""
value_fn: Callable[[dict[str, Any]], bool | None]
entity_picture: Callable[[dict[str, Any]], str | None]
value_fn: Callable[[UserData], bool | None]
entity_picture: Callable[[UserData], str | None]
class HabiticaBinarySensor(StrEnum):
@@ -33,10 +34,10 @@ class HabiticaBinarySensor(StrEnum):
PENDING_QUEST = "pending_quest"
def get_scroll_image_for_pending_quest_invitation(user: dict[str, Any]) -> str | None:
def get_scroll_image_for_pending_quest_invitation(user: UserData) -> str | None:
"""Entity picture for pending quest invitation."""
if user["party"]["quest"].get("key") and user["party"]["quest"]["RSVPNeeded"]:
return f"inventory_quest_scroll_{user["party"]["quest"]["key"]}.png"
if user.party.quest.key and user.party.quest.RSVPNeeded:
return f"inventory_quest_scroll_{user.party.quest.key}.png"
return None
@@ -44,7 +45,7 @@ BINARY_SENSOR_DESCRIPTIONS: tuple[HabiticaBinarySensorEntityDescription, ...] =
HabiticaBinarySensorEntityDescription(
key=HabiticaBinarySensor.PENDING_QUEST,
translation_key=HabiticaBinarySensor.PENDING_QUEST,
value_fn=lambda user: user["party"]["quest"]["RSVPNeeded"],
value_fn=lambda user: user.party.quest.RSVPNeeded,
entity_picture=get_scroll_image_for_pending_quest_invitation,
),
)

View File

@@ -5,10 +5,17 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from http import HTTPStatus
from typing import Any
from aiohttp import ClientResponseError
from aiohttp import ClientError
from habiticalib import (
HabiticaClass,
HabiticaException,
NotAuthorizedError,
Skill,
TaskType,
TooManyRequestsError,
)
from homeassistant.components.button import (
DOMAIN as BUTTON_DOMAIN,
@@ -20,7 +27,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import ASSETS_URL, DOMAIN, HEALER, MAGE, ROGUE, WARRIOR
from .const import ASSETS_URL, DOMAIN
from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
from .types import HabiticaConfigEntry
@@ -34,7 +41,7 @@ class HabiticaButtonEntityDescription(ButtonEntityDescription):
press_fn: Callable[[HabiticaDataUpdateCoordinator], Any]
available_fn: Callable[[HabiticaData], bool]
class_needed: str | None = None
class_needed: HabiticaClass | None = None
entity_picture: str | None = None
@@ -63,35 +70,33 @@ BUTTON_DESCRIPTIONS: tuple[HabiticaButtonEntityDescription, ...] = (
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.RUN_CRON,
translation_key=HabitipyButtonEntity.RUN_CRON,
press_fn=lambda coordinator: coordinator.api.cron.post(),
available_fn=lambda data: data.user["needsCron"],
press_fn=lambda coordinator: coordinator.habitica.run_cron(),
available_fn=lambda data: data.user.needsCron is True,
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.BUY_HEALTH_POTION,
translation_key=HabitipyButtonEntity.BUY_HEALTH_POTION,
press_fn=(
lambda coordinator: coordinator.api["user"]["buy-health-potion"].post()
),
press_fn=lambda coordinator: coordinator.habitica.buy_health_potion(),
available_fn=(
lambda data: data.user["stats"]["gp"] >= 25
and data.user["stats"]["hp"] < 50
lambda data: (data.user.stats.gp or 0) >= 25
and (data.user.stats.hp or 0) < 50
),
entity_picture="shop_potion.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
translation_key=HabitipyButtonEntity.ALLOCATE_ALL_STAT_POINTS,
press_fn=lambda coordinator: coordinator.api["user"]["allocate-now"].post(),
press_fn=lambda coordinator: coordinator.habitica.allocate_stat_points(),
available_fn=(
lambda data: data.user["preferences"].get("automaticAllocation") is True
and data.user["stats"]["points"] > 0
lambda data: data.user.preferences.automaticAllocation is True
and (data.user.stats.points or 0) > 0
),
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.REVIVE,
translation_key=HabitipyButtonEntity.REVIVE,
press_fn=lambda coordinator: coordinator.api["user"]["revive"].post(),
available_fn=lambda data: data.user["stats"]["hp"] == 0,
press_fn=lambda coordinator: coordinator.habitica.revive(),
available_fn=lambda data: data.user.stats.hp == 0,
),
)
@@ -100,166 +105,170 @@ CLASS_SKILLS: tuple[HabiticaButtonEntityDescription, ...] = (
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.MPHEAL,
translation_key=HabitipyButtonEntity.MPHEAL,
press_fn=lambda coordinator: coordinator.api.user.class_.cast["mpheal"].post(),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 12
and data.user["stats"]["mp"] >= 30
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.ETHEREAL_SURGE)
),
class_needed=MAGE,
available_fn=(
lambda data: (data.user.stats.lvl or 0) >= 12
and (data.user.stats.mp or 0) >= 30
),
class_needed=HabiticaClass.MAGE,
entity_picture="shop_mpheal.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.EARTH,
translation_key=HabitipyButtonEntity.EARTH,
press_fn=lambda coordinator: coordinator.api.user.class_.cast["earth"].post(),
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.EARTHQUAKE),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 13
and data.user["stats"]["mp"] >= 35
lambda data: (data.user.stats.lvl or 0) >= 13
and (data.user.stats.mp or 0) >= 35
),
class_needed=MAGE,
class_needed=HabiticaClass.MAGE,
entity_picture="shop_earth.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.FROST,
translation_key=HabitipyButtonEntity.FROST,
press_fn=lambda coordinator: coordinator.api.user.class_.cast["frost"].post(),
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.CHILLING_FROST)
),
# chilling frost can only be cast once per day (streaks buff is false)
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 14
and data.user["stats"]["mp"] >= 40
and not data.user["stats"]["buffs"]["streaks"]
lambda data: (data.user.stats.lvl or 0) >= 14
and (data.user.stats.mp or 0) >= 40
and not data.user.stats.buffs.streaks
),
class_needed=MAGE,
class_needed=HabiticaClass.MAGE,
entity_picture="shop_frost.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.DEFENSIVE_STANCE,
translation_key=HabitipyButtonEntity.DEFENSIVE_STANCE,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast[
"defensiveStance"
].post()
lambda coordinator: coordinator.habitica.cast_skill(Skill.DEFENSIVE_STANCE)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 12
and data.user["stats"]["mp"] >= 25
lambda data: (data.user.stats.lvl or 0) >= 12
and (data.user.stats.mp or 0) >= 25
),
class_needed=WARRIOR,
class_needed=HabiticaClass.WARRIOR,
entity_picture="shop_defensiveStance.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.VALOROUS_PRESENCE,
translation_key=HabitipyButtonEntity.VALOROUS_PRESENCE,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast[
"valorousPresence"
].post()
lambda coordinator: coordinator.habitica.cast_skill(Skill.VALOROUS_PRESENCE)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 13
and data.user["stats"]["mp"] >= 20
lambda data: (data.user.stats.lvl or 0) >= 13
and (data.user.stats.mp or 0) >= 20
),
class_needed=WARRIOR,
class_needed=HabiticaClass.WARRIOR,
entity_picture="shop_valorousPresence.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.INTIMIDATE,
translation_key=HabitipyButtonEntity.INTIMIDATE,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast["intimidate"].post()
lambda coordinator: coordinator.habitica.cast_skill(Skill.INTIMIDATING_GAZE)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 14
and data.user["stats"]["mp"] >= 15
lambda data: (data.user.stats.lvl or 0) >= 14
and (data.user.stats.mp or 0) >= 15
),
class_needed=WARRIOR,
class_needed=HabiticaClass.WARRIOR,
entity_picture="shop_intimidate.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.TOOLS_OF_TRADE,
translation_key=HabitipyButtonEntity.TOOLS_OF_TRADE,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast["toolsOfTrade"].post()
lambda coordinator: coordinator.habitica.cast_skill(
Skill.TOOLS_OF_THE_TRADE
)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 13
and data.user["stats"]["mp"] >= 25
lambda data: (data.user.stats.lvl or 0) >= 13
and (data.user.stats.mp or 0) >= 25
),
class_needed=ROGUE,
class_needed=HabiticaClass.ROGUE,
entity_picture="shop_toolsOfTrade.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.STEALTH,
translation_key=HabitipyButtonEntity.STEALTH,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast["stealth"].post()
),
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.STEALTH),
# Stealth buffs stack and it can only be cast if the amount of
# unfinished dailies is smaller than the amount of buffs
# buffs is smaller than the amount of unfinished dailies
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 14
and data.user["stats"]["mp"] >= 45
and data.user["stats"]["buffs"]["stealth"]
lambda data: (data.user.stats.lvl or 0) >= 14
and (data.user.stats.mp or 0) >= 45
and (data.user.stats.buffs.stealth or 0)
< len(
[
r
for r in data.tasks
if r.get("type") == "daily"
and r.get("isDue") is True
and r.get("completed") is False
if r.Type is TaskType.DAILY
and r.isDue is True
and r.completed is False
]
)
),
class_needed=ROGUE,
class_needed=HabiticaClass.ROGUE,
entity_picture="shop_stealth.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.HEAL,
translation_key=HabitipyButtonEntity.HEAL,
press_fn=lambda coordinator: coordinator.api.user.class_.cast["heal"].post(),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 11
and data.user["stats"]["mp"] >= 15
and data.user["stats"]["hp"] < 50
press_fn=(
lambda coordinator: coordinator.habitica.cast_skill(Skill.HEALING_LIGHT)
),
class_needed=HEALER,
available_fn=(
lambda data: (data.user.stats.lvl or 0) >= 11
and (data.user.stats.mp or 0) >= 15
and (data.user.stats.hp or 0) < 50
),
class_needed=HabiticaClass.HEALER,
entity_picture="shop_heal.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.BRIGHTNESS,
translation_key=HabitipyButtonEntity.BRIGHTNESS,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast["brightness"].post()
lambda coordinator: coordinator.habitica.cast_skill(
Skill.SEARING_BRIGHTNESS
)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 12
and data.user["stats"]["mp"] >= 15
lambda data: (data.user.stats.lvl or 0) >= 12
and (data.user.stats.mp or 0) >= 15
),
class_needed=HEALER,
class_needed=HabiticaClass.HEALER,
entity_picture="shop_brightness.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.PROTECT_AURA,
translation_key=HabitipyButtonEntity.PROTECT_AURA,
press_fn=(
lambda coordinator: coordinator.api.user.class_.cast["protectAura"].post()
lambda coordinator: coordinator.habitica.cast_skill(Skill.PROTECTIVE_AURA)
),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 13
and data.user["stats"]["mp"] >= 30
lambda data: (data.user.stats.lvl or 0) >= 13
and (data.user.stats.mp or 0) >= 30
),
class_needed=HEALER,
class_needed=HabiticaClass.HEALER,
entity_picture="shop_protectAura.png",
),
HabiticaButtonEntityDescription(
key=HabitipyButtonEntity.HEAL_ALL,
translation_key=HabitipyButtonEntity.HEAL_ALL,
press_fn=lambda coordinator: coordinator.api.user.class_.cast["healAll"].post(),
press_fn=lambda coordinator: coordinator.habitica.cast_skill(Skill.BLESSING),
available_fn=(
lambda data: data.user["stats"]["lvl"] >= 14
and data.user["stats"]["mp"] >= 25
lambda data: (data.user.stats.lvl or 0) >= 14
and (data.user.stats.mp or 0) >= 25
),
class_needed=HEALER,
class_needed=HabiticaClass.HEALER,
entity_picture="shop_healAll.png",
),
)
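The availability lambdas above gate each skill button on the player's level, mana and, for some skills, buffs. As a rough standalone illustration (not integration code), evaluating one such available_fn against fabricated stats could look like the sketch below, where SimpleNamespace stands in for habiticalib's UserData:
# Standalone sketch with made-up values; SimpleNamespace stands in for UserData.
from types import SimpleNamespace

# mirrors the Chilling Frost gate above: level >= 14, mana >= 40, streaks buff inactive
available_fn = (
    lambda data: (data.user.stats.lvl or 0) >= 14
    and (data.user.stats.mp or 0) >= 40
    and not data.user.stats.buffs.streaks
)

data = SimpleNamespace(
    user=SimpleNamespace(
        stats=SimpleNamespace(lvl=20, mp=55, buffs=SimpleNamespace(streaks=False))
    )
)
print(available_fn(data))  # True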
@@ -285,10 +294,10 @@ async def async_setup_entry(
for description in CLASS_SKILLS:
if (
coordinator.data.user["stats"]["lvl"] >= 10
and coordinator.data.user["flags"]["classSelected"]
and not coordinator.data.user["preferences"]["disableClasses"]
and description.class_needed == coordinator.data.user["stats"]["class"]
(coordinator.data.user.stats.lvl or 0) >= 10
and coordinator.data.user.flags.classSelected
and not coordinator.data.user.preferences.disableClasses
and description.class_needed is coordinator.data.user.stats.Class
):
if description.key not in skills_added:
buttons.append(HabiticaButton(coordinator, description))
@@ -322,17 +331,17 @@ class HabiticaButton(HabiticaBase, ButtonEntity):
"""Handle the button press."""
try:
await self.entity_description.press_fn(self.coordinator)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_call_unallowed",
) from e
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except NotAuthorizedError as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="service_call_unallowed",
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",

View File

@@ -5,8 +5,11 @@ from __future__ import annotations
from abc import abstractmethod
from datetime import date, datetime, timedelta
from enum import StrEnum
from typing import TYPE_CHECKING
from uuid import UUID
from dateutil.rrule import rrule
from habiticalib import TaskType
from homeassistant.components.calendar import (
CalendarEntity,
@@ -20,7 +23,6 @@ from homeassistant.util import dt as dt_util
from . import HabiticaConfigEntry
from .coordinator import HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
from .types import HabiticaTaskType
from .util import build_rrule, get_recurrence_rule
@@ -83,9 +85,7 @@ class HabiticaCalendarEntity(HabiticaBase, CalendarEntity):
@property
def start_of_today(self) -> datetime:
"""Habitica daystart."""
return dt_util.start_of_local_day(
datetime.fromisoformat(self.coordinator.data.user["lastCron"])
)
return dt_util.start_of_local_day(self.coordinator.data.user.lastCron)
def get_recurrence_dates(
self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None
@@ -115,13 +115,13 @@ class HabiticaTodosCalendarEntity(HabiticaCalendarEntity):
events = []
for task in self.coordinator.data.tasks:
if not (
task["type"] == HabiticaTaskType.TODO
and not task["completed"]
and task.get("date") # only if has due date
task.Type is TaskType.TODO
and not task.completed
and task.date is not None # only if has due date
):
continue
start = dt_util.start_of_local_day(datetime.fromisoformat(task["date"]))
start = dt_util.start_of_local_day(task.date)
end = start + timedelta(days=1)
# return current and upcoming events or events within the requested range
@@ -132,21 +132,23 @@ class HabiticaTodosCalendarEntity(HabiticaCalendarEntity):
if end_date and start > end_date:
# Event starts after date range
continue
if TYPE_CHECKING:
assert task.text
assert task.id
events.append(
CalendarEvent(
start=start.date(),
end=end.date(),
summary=task["text"],
description=task["notes"],
uid=task["id"],
summary=task.text,
description=task.notes,
uid=str(task.id),
)
)
return sorted(
events,
key=lambda event: (
event.start,
self.coordinator.data.user["tasksOrder"]["todos"].index(event.uid),
self.coordinator.data.user.tasksOrder.todos.index(UUID(event.uid)),
),
)
@@ -189,7 +191,7 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity):
events = []
for task in self.coordinator.data.tasks:
# only dailies that are not 'grey dailies'
if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]):
if not (task.Type is TaskType.DAILY and task.everyX):
continue
recurrences = build_rrule(task)
@@ -199,19 +201,21 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity):
for recurrence in recurrence_dates:
is_future_event = recurrence > self.start_of_today
is_current_event = (
recurrence <= self.start_of_today and not task["completed"]
recurrence <= self.start_of_today and not task.completed
)
if not is_future_event and not is_current_event:
continue
if TYPE_CHECKING:
assert task.text
assert task.id
events.append(
CalendarEvent(
start=recurrence.date(),
end=self.end_date(recurrence, end_date),
summary=task["text"],
description=task["notes"],
uid=task["id"],
summary=task.text,
description=task.notes,
uid=str(task.id),
rrule=get_recurrence_rule(recurrences),
)
)
@@ -219,7 +223,7 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity):
events,
key=lambda event: (
event.start,
self.coordinator.data.user["tasksOrder"]["dailys"].index(event.uid),
self.coordinator.data.user.tasksOrder.dailys.index(UUID(event.uid)),
),
)
@@ -254,14 +258,14 @@ class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity):
events = []
for task in self.coordinator.data.tasks:
if task["type"] != HabiticaTaskType.TODO or task["completed"]:
if task.Type is not TaskType.TODO or task.completed:
continue
for reminder in task.get("reminders", []):
for reminder in task.reminders:
# reminders are returned by the API in local time but with the wrong
# timezone (UTC) and arbitrarily added seconds/microseconds. When
# creating reminders in Habitica only hours and minutes can be defined.
start = datetime.fromisoformat(reminder["time"]).replace(
start = reminder.time.replace(
tzinfo=dt_util.DEFAULT_TIME_ZONE, second=0, microsecond=0
)
end = start + timedelta(hours=1)
@@ -273,14 +277,16 @@ class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity):
if end_date and start > end_date:
# Event starts after date range
continue
if TYPE_CHECKING:
assert task.text
assert task.id
events.append(
CalendarEvent(
start=start,
end=end,
summary=task["text"],
description=task["notes"],
uid=f"{task["id"]}_{reminder["id"]}",
summary=task.text,
description=task.notes,
uid=f"{task.id}_{reminder.id}",
)
)
@@ -298,7 +304,7 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity):
translation_key=HabiticaCalendar.DAILY_REMINDERS,
)
def start(self, reminder_time: str, reminder_date: date) -> datetime:
def start(self, reminder_time: datetime, reminder_date: date) -> datetime:
"""Generate reminder times for dailies.
Reminders for dailies have a datetime but the date part is arbitrary,
@@ -307,12 +313,10 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity):
"""
return datetime.combine(
reminder_date,
datetime.fromisoformat(reminder_time)
.replace(
reminder_time.replace(
second=0,
microsecond=0,
)
.time(),
).time(),
tzinfo=dt_util.DEFAULT_TIME_ZONE,
)
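Because the stored reminder datetime carries an arbitrary date part, only its time-of-day is kept and combined with the recurrence date. A minimal standalone sketch of that combination (illustrative values; UTC is used here only for brevity where the integration applies the local time zone):
# Standalone sketch, not integration code.
from datetime import date, datetime, timezone

def reminder_start(reminder_time: datetime, reminder_date: date) -> datetime:
    # keep only hours and minutes from the stored reminder, attach the recurrence date
    return datetime.combine(
        reminder_date,
        reminder_time.replace(second=0, microsecond=0).time(),
        tzinfo=timezone.utc,
    )

print(reminder_start(datetime(2015, 1, 1, 9, 30, 17), date(2024, 5, 6)))
# 2024-05-06 09:30:00+00:00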
@@ -327,7 +331,7 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity):
start_date = max(start_date, self.start_of_today)
for task in self.coordinator.data.tasks:
if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]):
if not (task.Type is TaskType.DAILY and task.everyX):
continue
recurrences = build_rrule(task)
@@ -339,27 +343,30 @@ class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity):
for recurrence in recurrence_dates:
is_future_event = recurrence > self.start_of_today
is_current_event = (
recurrence <= self.start_of_today and not task["completed"]
recurrence <= self.start_of_today and not task.completed
)
if not is_future_event and not is_current_event:
continue
for reminder in task.get("reminders", []):
start = self.start(reminder["time"], recurrence)
for reminder in task.reminders:
start = self.start(reminder.time, recurrence)
end = start + timedelta(hours=1)
if end < start_date:
# Event ends before date range
continue
if TYPE_CHECKING:
assert task.id
assert task.text
events.append(
CalendarEvent(
start=start,
end=end,
summary=task["text"],
description=task["notes"],
uid=f"{task["id"]}_{reminder["id"]}",
summary=task.text,
description=task.notes,
uid=f"{task.id}_{reminder.id}",
)
)

View File

@@ -2,17 +2,25 @@
from __future__ import annotations
from http import HTTPStatus
from collections.abc import Mapping
import logging
from typing import Any
from typing import TYPE_CHECKING, Any
from aiohttp import ClientResponseError
from habitipy.aio import HabitipyAsync
from aiohttp import ClientError
from habiticalib import (
Habitica,
HabiticaException,
LoginData,
NotAuthorizedError,
UserData,
)
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_API_KEY,
CONF_NAME,
CONF_PASSWORD,
CONF_URL,
CONF_USERNAME,
@@ -25,14 +33,18 @@ from homeassistant.helpers.selector import (
TextSelectorType,
)
from . import HabiticaConfigEntry
from .const import (
CONF_API_USER,
DEFAULT_URL,
DOMAIN,
FORGOT_PASSWORD_URL,
HABITICANS_URL,
SECTION_REAUTH_API_KEY,
SECTION_REAUTH_LOGIN,
SIGN_UP_URL,
SITE_DATA_URL,
X_CLIENT,
)
STEP_ADVANCED_DATA_SCHEMA = vol.Schema(
@@ -61,14 +73,44 @@ STEP_LOGIN_DATA_SCHEMA = vol.Schema(
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{
vol.Required(SECTION_REAUTH_LOGIN): data_entry_flow.section(
vol.Schema(
{
vol.Optional(CONF_USERNAME): TextSelector(
TextSelectorConfig(
type=TextSelectorType.EMAIL,
autocomplete="email",
)
),
vol.Optional(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
},
),
{"collapsed": False},
),
vol.Required(SECTION_REAUTH_API_KEY): data_entry_flow.section(
vol.Schema(
{
vol.Optional(CONF_API_KEY): str,
},
),
{"collapsed": True},
),
}
)
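With sectioned schemas, the submitted user_input arrives nested per section key, which is why the reauth step below first indexes into SECTION_REAUTH_LOGIN or SECTION_REAUTH_API_KEY. A hedged sketch of that shape (example values only):
# Illustrative shape only; keys follow SECTION_REAUTH_LOGIN / SECTION_REAUTH_API_KEY.
user_input = {
    "reauth_login": {"username": "adventurer@example.com", "password": "hunter2"},
    "reauth_api_key": {},  # collapsed section left empty by the user
}
username = user_input["reauth_login"].get("username")  # "adventurer@example.com"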
_LOGGER = logging.getLogger(__name__)
class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for habitica."""
VERSION = 1
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -93,39 +135,20 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN):
"""
errors: dict[str, str] = {}
if user_input is not None:
try:
session = async_get_clientsession(self.hass)
api = await self.hass.async_add_executor_job(
HabitipyAsync,
{
"login": "",
"password": "",
"url": DEFAULT_URL,
},
)
login_response = await api.user.auth.local.login.post(
session=session,
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
)
except ClientResponseError as ex:
if ex.status == HTTPStatus.UNAUTHORIZED:
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(login_response["id"])
errors, login, user = await self.validate_login(
{**user_input, CONF_URL: DEFAULT_URL}
)
if not errors and login is not None and user is not None:
await self.async_set_unique_id(str(login.id))
self._abort_if_unique_id_configured()
if TYPE_CHECKING:
assert user.profile.name
return self.async_create_entry(
title=login_response["username"],
title=user.profile.name,
data={
CONF_API_USER: login_response["id"],
CONF_API_KEY: login_response["apiToken"],
CONF_USERNAME: login_response["username"],
CONF_API_USER: str(login.id),
CONF_API_KEY: login.apiToken,
CONF_NAME: user.profile.name, # needed for api_call action
CONF_URL: DEFAULT_URL,
CONF_VERIFY_SSL: True,
},
@@ -150,36 +173,19 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN):
"""
errors: dict[str, str] = {}
if user_input is not None:
try:
session = async_get_clientsession(
self.hass, verify_ssl=user_input.get(CONF_VERIFY_SSL, True)
)
api = await self.hass.async_add_executor_job(
HabitipyAsync,
{
"login": user_input[CONF_API_USER],
"password": user_input[CONF_API_KEY],
"url": user_input.get(CONF_URL, DEFAULT_URL),
},
)
api_response = await api.user.get(
session=session,
userFields="auth",
)
except ClientResponseError as ex:
if ex.status == HTTPStatus.UNAUTHORIZED:
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_API_USER])
self._abort_if_unique_id_configured()
user_input[CONF_USERNAME] = api_response["auth"]["local"]["username"]
await self.async_set_unique_id(user_input[CONF_API_USER])
self._abort_if_unique_id_configured()
errors, user = await self.validate_api_key(user_input)
if not errors and user is not None:
if TYPE_CHECKING:
assert user.profile.name
return self.async_create_entry(
title=user_input[CONF_USERNAME], data=user_input
title=user.profile.name,
data={
**user_input,
CONF_URL: user_input.get(CONF_URL, DEFAULT_URL),
CONF_NAME: user.profile.name, # needed for api_call action
},
)
return self.async_show_form(
@@ -193,3 +199,120 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN):
"default_url": DEFAULT_URL,
},
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
errors: dict[str, str] = {}
reauth_entry: HabiticaConfigEntry = self._get_reauth_entry()
if user_input is not None:
if user_input[SECTION_REAUTH_LOGIN].get(CONF_USERNAME) and user_input[
SECTION_REAUTH_LOGIN
].get(CONF_PASSWORD):
errors, login, _ = await self.validate_login(
{**reauth_entry.data, **user_input[SECTION_REAUTH_LOGIN]}
)
if not errors and login is not None:
await self.async_set_unique_id(str(login.id))
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_KEY: login.apiToken},
)
elif user_input[SECTION_REAUTH_API_KEY].get(CONF_API_KEY):
errors, user = await self.validate_api_key(
{
**reauth_entry.data,
**user_input[SECTION_REAUTH_API_KEY],
}
)
if not errors and user is not None:
return self.async_update_reload_and_abort(
reauth_entry, data_updates=user_input[SECTION_REAUTH_API_KEY]
)
else:
errors["base"] = "invalid_credentials"
return self.async_show_form(
step_id="reauth_confirm",
data_schema=self.add_suggested_values_to_schema(
data_schema=STEP_REAUTH_DATA_SCHEMA,
suggested_values={
CONF_USERNAME: (
user_input[SECTION_REAUTH_LOGIN].get(CONF_USERNAME)
if user_input
else None
)
},
),
description_placeholders={
CONF_NAME: reauth_entry.title,
"habiticans": HABITICANS_URL,
},
errors=errors,
)
async def validate_login(
self, user_input: Mapping[str, Any]
) -> tuple[dict[str, str], LoginData | None, UserData | None]:
"""Validate login with login credentials."""
errors: dict[str, str] = {}
session = async_get_clientsession(
self.hass, verify_ssl=user_input.get(CONF_VERIFY_SSL, True)
)
api = Habitica(session=session, x_client=X_CLIENT)
try:
login = await api.login(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
)
user = await api.get_user(user_fields="profile")
except NotAuthorizedError:
errors["base"] = "invalid_auth"
except (HabiticaException, ClientError):
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return errors, login.data, user.data
return errors, None, None
async def validate_api_key(
self, user_input: Mapping[str, Any]
) -> tuple[dict[str, str], UserData | None]:
"""Validate authentication with api key."""
errors: dict[str, str] = {}
session = async_get_clientsession(
self.hass, verify_ssl=user_input.get(CONF_VERIFY_SSL, True)
)
api = Habitica(
session=session,
x_client=X_CLIENT,
api_user=user_input[CONF_API_USER],
api_key=user_input[CONF_API_KEY],
url=user_input.get(CONF_URL, DEFAULT_URL),
)
try:
user = await api.get_user(user_fields="profile")
except NotAuthorizedError:
errors["base"] = "invalid_auth"
except (HabiticaException, ClientError):
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return errors, user.data
return errors, None

View File

@@ -1,6 +1,6 @@
"""Constants for the habitica integration."""
from homeassistant.const import CONF_PATH
from homeassistant.const import APPLICATION_NAME, CONF_PATH, __version__
CONF_API_USER = "api_user"
@@ -44,9 +44,8 @@ SERVICE_SCORE_REWARD = "score_reward"
SERVICE_TRANSFORMATION = "transformation"
WARRIOR = "warrior"
ROGUE = "rogue"
HEALER = "healer"
MAGE = "wizard"
DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf"
X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"
SECTION_REAUTH_LOGIN = "reauth_login"
SECTION_REAUTH_API_KEY = "reauth_api_key"

View File

@@ -5,16 +5,29 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
from http import HTTPStatus
import logging
from typing import Any
from aiohttp import ClientResponseError
from habitipy.aio import HabitipyAsync
from aiohttp import ClientError
from habiticalib import (
ContentData,
Habitica,
HabiticaException,
NotAuthorizedError,
TaskData,
TaskFilter,
TooManyRequestsError,
UserData,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -25,10 +38,10 @@ _LOGGER = logging.getLogger(__name__)
@dataclass
class HabiticaData:
"""Coordinator data class."""
"""Habitica data."""
user: dict[str, Any]
tasks: list[dict]
user: UserData
tasks: list[TaskData]
class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
@@ -36,7 +49,7 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
config_entry: ConfigEntry
def __init__(self, hass: HomeAssistant, habitipy: HabitipyAsync) -> None:
def __init__(self, hass: HomeAssistant, habitica: Habitica) -> None:
"""Initialize the Habitica data coordinator."""
super().__init__(
hass,
@@ -50,25 +63,53 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
immediate=False,
),
)
self.api = habitipy
self.content: dict[str, Any] = {}
self.habitica = habitica
self.content: ContentData
async def _async_setup(self) -> None:
"""Set up Habitica integration."""
try:
user = await self.habitica.get_user()
self.content = (
await self.habitica.get_content(user.data.preferences.language)
).data
except NotAuthorizedError as e:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="authentication_failed",
) from e
except TooManyRequestsError as e:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except (HabiticaException, ClientError) as e:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
if not self.config_entry.data.get(CONF_NAME):
self.hass.config_entries.async_update_entry(
self.config_entry,
data={**self.config_entry.data, CONF_NAME: user.data.profile.name},
)
async def _async_update_data(self) -> HabiticaData:
try:
user_response = await self.api.user.get()
tasks_response = await self.api.tasks.user.get()
tasks_response.extend(await self.api.tasks.user.get(type="completedTodos"))
if not self.content:
self.content = await self.api.content.get(
language=user_response["preferences"]["language"]
)
except ClientResponseError as error:
if error.status == HTTPStatus.TOO_MANY_REQUESTS:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
raise UpdateFailed(f"Unable to connect to Habitica: {error}") from error
return HabiticaData(user=user_response, tasks=tasks_response)
user = (await self.habitica.get_user()).data
tasks = (await self.habitica.get_tasks()).data
completed_todos = (
await self.habitica.get_tasks(TaskFilter.COMPLETED_TODOS)
).data
except TooManyRequestsError:
_LOGGER.debug("Rate limit exceeded, will try again later")
return self.data
except (HabiticaException, ClientError) as e:
raise UpdateFailed(f"Unable to connect to Habitica: {e}") from e
else:
return HabiticaData(user=user, tasks=tasks + completed_todos)
async def execute(
self, func: Callable[[HabiticaDataUpdateCoordinator], Any]
@@ -77,12 +118,12 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]):
try:
await func(self)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",

View File

@@ -16,12 +16,12 @@ async def async_get_config_entry_diagnostics(
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
habitica_data = await config_entry.runtime_data.api.user.anonymized.get()
habitica_data = await config_entry.runtime_data.habitica.get_user_anonymized()
return {
"config_entry_data": {
CONF_URL: config_entry.data[CONF_URL],
CONF_API_USER: config_entry.data[CONF_API_USER],
},
"habitica_data": habitica_data,
"habitica_data": habitica_data.to_dict()["data"],
}

View File

@@ -5,6 +5,6 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/habitica",
"iot_class": "cloud_polling",
"loggers": ["habitipy", "plumbum"],
"requirements": ["habitipy==0.3.3"]
"loggers": ["habiticalib"],
"requirements": ["habiticalib==0.3.2"]
}

View File

@@ -34,7 +34,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done
# Gold

View File

@@ -3,11 +3,20 @@
from __future__ import annotations
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from dataclasses import asdict, dataclass
from enum import StrEnum
import logging
from typing import TYPE_CHECKING, Any
from habiticalib import (
ContentData,
HabiticaClass,
TaskData,
TaskType,
UserData,
deserialize_task,
)
from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
@@ -36,10 +45,10 @@ _LOGGER = logging.getLogger(__name__)
class HabitipySensorEntityDescription(SensorEntityDescription):
"""Habitipy Sensor Description."""
value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType]
attributes_fn: (
Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None
) = None
value_fn: Callable[[UserData, ContentData], StateType]
attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
None
)
entity_picture: str | None = None
@@ -47,7 +56,7 @@ class HabitipySensorEntityDescription(SensorEntityDescription):
class HabitipyTaskSensorEntityDescription(SensorEntityDescription):
"""Habitipy Task Sensor Description."""
value_fn: Callable[[list[dict[str, Any]]], list[dict[str, Any]]]
value_fn: Callable[[list[TaskData]], list[TaskData]]
class HabitipySensorEntity(StrEnum):
@@ -79,75 +88,70 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
HabitipySensorEntityDescription(
key=HabitipySensorEntity.DISPLAY_NAME,
translation_key=HabitipySensorEntity.DISPLAY_NAME,
value_fn=lambda user, _: user.get("profile", {}).get("name"),
value_fn=lambda user, _: user.profile.name,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH,
translation_key=HabitipySensorEntity.HEALTH,
suggested_display_precision=0,
value_fn=lambda user, _: user.get("stats", {}).get("hp"),
value_fn=lambda user, _: user.stats.hp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.HEALTH_MAX,
translation_key=HabitipySensorEntity.HEALTH_MAX,
entity_registry_enabled_default=False,
value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"),
value_fn=lambda user, _: 50,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA,
translation_key=HabitipySensorEntity.MANA,
suggested_display_precision=0,
value_fn=lambda user, _: user.get("stats", {}).get("mp"),
value_fn=lambda user, _: user.stats.mp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.MANA_MAX,
translation_key=HabitipySensorEntity.MANA_MAX,
value_fn=lambda user, _: user.get("stats", {}).get("maxMP"),
value_fn=lambda user, _: user.stats.maxMP,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE,
translation_key=HabitipySensorEntity.EXPERIENCE,
value_fn=lambda user, _: user.get("stats", {}).get("exp"),
value_fn=lambda user, _: user.stats.exp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.EXPERIENCE_MAX,
translation_key=HabitipySensorEntity.EXPERIENCE_MAX,
value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"),
value_fn=lambda user, _: user.stats.toNextLevel,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.LEVEL,
translation_key=HabitipySensorEntity.LEVEL,
value_fn=lambda user, _: user.get("stats", {}).get("lvl"),
value_fn=lambda user, _: user.stats.lvl,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GOLD,
translation_key=HabitipySensorEntity.GOLD,
suggested_display_precision=2,
value_fn=lambda user, _: user.get("stats", {}).get("gp"),
value_fn=lambda user, _: user.stats.gp,
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.CLASS,
translation_key=HabitipySensorEntity.CLASS,
value_fn=lambda user, _: user.get("stats", {}).get("class"),
value_fn=lambda user, _: user.stats.Class.value if user.stats.Class else None,
device_class=SensorDeviceClass.ENUM,
options=["warrior", "healer", "wizard", "rogue"],
options=[item.value for item in HabiticaClass],
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.GEMS,
translation_key=HabitipySensorEntity.GEMS,
value_fn=lambda user, _: user.get("balance", 0) * 4,
value_fn=lambda user, _: round(user.balance * 4) if user.balance else None,
suggested_display_precision=0,
entity_picture="shop_gem.png",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.TRINKETS,
translation_key=HabitipySensorEntity.TRINKETS,
value_fn=(
lambda user, _: user.get("purchased", {})
.get("plan", {})
.get("consecutive", {})
.get("trinkets", 0)
),
value_fn=lambda user, _: user.purchased.plan.consecutive.trinkets or 0,
suggested_display_precision=0,
native_unit_of_measurement="",
entity_picture="notif_subscriber_reward.png",
@@ -155,16 +159,16 @@ SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = (
HabitipySensorEntityDescription(
key=HabitipySensorEntity.STRENGTH,
translation_key=HabitipySensorEntity.STRENGTH,
value_fn=lambda user, content: get_attributes_total(user, content, "str"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "str"),
value_fn=lambda user, content: get_attributes_total(user, content, "Str"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "Str"),
suggested_display_precision=0,
native_unit_of_measurement="STR",
),
HabitipySensorEntityDescription(
key=HabitipySensorEntity.INTELLIGENCE,
translation_key=HabitipySensorEntity.INTELLIGENCE,
value_fn=lambda user, content: get_attributes_total(user, content, "int"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "int"),
value_fn=lambda user, content: get_attributes_total(user, content, "Int"),
attributes_fn=lambda user, content: get_attribute_points(user, content, "Int"),
suggested_display_precision=0,
native_unit_of_measurement="INT",
),
@@ -203,7 +207,7 @@ TASKS_MAP = {
"yester_daily": "yesterDaily",
"completed": "completed",
"collapse_checklist": "collapseChecklist",
"type": "type",
"type": "Type",
"notes": "notes",
"tags": "tags",
"value": "value",
@@ -221,26 +225,28 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = (
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.HABITS,
translation_key=HabitipySensorEntity.HABITS,
value_fn=lambda tasks: [r for r in tasks if r.get("type") == "habit"],
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.HABIT],
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.DAILIES,
translation_key=HabitipySensorEntity.DAILIES,
value_fn=lambda tasks: [r for r in tasks if r.get("type") == "daily"],
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.DAILY],
entity_registry_enabled_default=False,
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.TODOS,
translation_key=HabitipySensorEntity.TODOS,
value_fn=lambda tasks: [
r for r in tasks if r.get("type") == "todo" and not r.get("completed")
],
value_fn=(
lambda tasks: [
r for r in tasks if r.Type is TaskType.TODO and not r.completed
]
),
entity_registry_enabled_default=False,
),
HabitipyTaskSensorEntityDescription(
key=HabitipySensorEntity.REWARDS,
translation_key=HabitipySensorEntity.REWARDS,
value_fn=lambda tasks: [r for r in tasks if r.get("type") == "reward"],
value_fn=lambda tasks: [r for r in tasks if r.Type is TaskType.REWARD],
),
)
@@ -309,15 +315,14 @@ class HabitipyTaskSensor(HabiticaBase, SensorEntity):
attrs = {}
# Map tasks to TASKS_MAP
for received_task in self.entity_description.value_fn(
self.coordinator.data.tasks
):
for task_data in self.entity_description.value_fn(self.coordinator.data.tasks):
received_task = deserialize_task(asdict(task_data))
task_id = received_task[TASKS_MAP_ID]
task = {}
for map_key, map_value in TASKS_MAP.items():
if value := received_task.get(map_value):
task[map_key] = value
attrs[task_id] = task
attrs[str(task_id)] = task
return attrs
async def async_added_to_hass(self) -> None:

View File

@@ -2,11 +2,19 @@
from __future__ import annotations
from http import HTTPStatus
from dataclasses import asdict
import logging
from typing import Any
from typing import TYPE_CHECKING
from aiohttp import ClientResponseError
from aiohttp import ClientError
from habiticalib import (
Direction,
HabiticaException,
NotAuthorizedError,
NotFoundError,
Skill,
TooManyRequestsError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
@@ -88,6 +96,25 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
}
)
SKILL_MAP = {
"pickpocket": Skill.PICKPOCKET,
"backstab": Skill.BACKSTAB,
"smash": Skill.BRUTAL_SMASH,
"fireball": Skill.BURST_OF_FLAMES,
}
COST_MAP = {
"pickpocket": "10 MP",
"backstab": "15 MP",
"smash": "10 MP",
"fireball": "10 MP",
}
ITEMID_MAP = {
"snowball": Skill.SNOWBALL,
"spooky_sparkles": Skill.SPOOKY_SPARKLES,
"seafoam": Skill.SEAFOAM,
"shiny_seed": Skill.SHINY_SEED,
}
def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
"""Return config entry or raise if not found or not loaded."""
@@ -123,12 +150,12 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
name = call.data[ATTR_NAME]
path = call.data[ATTR_PATH]
entries = hass.config_entries.async_entries(DOMAIN)
entries: list[HabiticaConfigEntry] = hass.config_entries.async_entries(DOMAIN)
api = None
for entry in entries:
if entry.data[CONF_NAME] == name:
api = entry.runtime_data.api
api = await entry.runtime_data.habitica.habitipy()
break
if api is None:
_LOGGER.error("API_CALL: User '%s' not configured", name)
@@ -151,18 +178,15 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
"""Skill action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
skill = {
"pickpocket": {"spellId": "pickPocket", "cost": "10 MP"},
"backstab": {"spellId": "backStab", "cost": "15 MP"},
"smash": {"spellId": "smash", "cost": "10 MP"},
"fireball": {"spellId": "fireball", "cost": "10 MP"},
}
skill = SKILL_MAP[call.data[ATTR_SKILL]]
cost = COST_MAP[call.data[ATTR_SKILL]]
try:
task_id = next(
task["id"]
task.id
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (task["id"], task.get("alias"))
or call.data[ATTR_TASK] == task["text"]
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
)
except StopIteration as e:
raise ServiceValidationError(
@@ -172,75 +196,76 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
) from e
try:
response: dict[str, Any] = await coordinator.api.user.class_.cast[
skill[call.data[ATTR_SKILL]]["spellId"]
].post(targetId=task_id)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_enough_mana",
translation_placeholders={
"cost": skill[call.data[ATTR_SKILL]]["cost"],
"mana": f"{int(coordinator.data.user.get("stats", {}).get("mp", 0))} MP",
},
) from e
if e.status == HTTPStatus.NOT_FOUND:
# could also be task not found, but the task is looked up
# before the request, so most likely wrong skill selected
# or the skill hasn't been unlocked yet.
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="skill_not_found",
translation_placeholders={"skill": call.data[ATTR_SKILL]},
) from e
response = await coordinator.habitica.cast_skill(skill, task_id)
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except NotAuthorizedError as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_enough_mana",
translation_placeholders={
"cost": cost,
"mana": f"{int(coordinator.data.user.stats.mp or 0)} MP",
},
) from e
except NotFoundError as e:
# could also be task not found, but the task is looked up
# before the request, so most likely wrong skill selected
# or the skill hasn't been unlocked yet.
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="skill_not_found",
translation_placeholders={"skill": call.data[ATTR_SKILL]},
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
else:
await coordinator.async_request_refresh()
return response
return asdict(response.data)
async def manage_quests(call: ServiceCall) -> ServiceResponse:
"""Accept, reject, start, leave or cancel quests."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
COMMAND_MAP = {
SERVICE_ABORT_QUEST: "abort",
SERVICE_ACCEPT_QUEST: "accept",
SERVICE_CANCEL_QUEST: "cancel",
SERVICE_LEAVE_QUEST: "leave",
SERVICE_REJECT_QUEST: "reject",
SERVICE_START_QUEST: "force-start",
FUNC_MAP = {
SERVICE_ABORT_QUEST: coordinator.habitica.abort_quest,
SERVICE_ACCEPT_QUEST: coordinator.habitica.accept_quest,
SERVICE_CANCEL_QUEST: coordinator.habitica.cancel_quest,
SERVICE_LEAVE_QUEST: coordinator.habitica.leave_quest,
SERVICE_REJECT_QUEST: coordinator.habitica.reject_quest,
SERVICE_START_QUEST: coordinator.habitica.start_quest,
}
func = FUNC_MAP[call.service]
try:
return await coordinator.api.groups.party.quests[
COMMAND_MAP[call.service]
].post()
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_action_unallowed"
) from e
if e.status == HTTPStatus.NOT_FOUND:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_not_found"
) from e
response = await func()
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except NotAuthorizedError as e:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_action_unallowed"
) from e
except NotFoundError as e:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="quest_not_found"
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN, translation_key="service_call_exception"
) from e
else:
return asdict(response.data)
for service in (
SERVICE_ABORT_QUEST,
@@ -262,12 +287,15 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
"""Score a task action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
direction = (
Direction.DOWN if call.data.get(ATTR_DIRECTION) == "down" else Direction.UP
)
try:
task_id, task_value = next(
(task["id"], task.get("value"))
(task.id, task.value)
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (task["id"], task.get("alias"))
or call.data[ATTR_TASK] == task["text"]
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
)
except StopIteration as e:
raise ServiceValidationError(
@@ -276,81 +304,76 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e
if TYPE_CHECKING:
assert task_id
try:
response: dict[str, Any] = (
await coordinator.api.tasks[task_id]
.score[call.data.get(ATTR_DIRECTION, "up")]
.post()
)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None:
response = await coordinator.habitica.update_score(task_id, direction)
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except NotAuthorizedError as e:
if task_value is not None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="not_enough_gold",
translation_placeholders={
"gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP",
"cost": f"{task_value} GP",
"gold": f"{(coordinator.data.user.stats.gp or 0):.2f} GP",
"cost": f"{task_value:.2f} GP",
},
) from e
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
else:
await coordinator.async_request_refresh()
return response
return asdict(response.data)
async def transformation(call: ServiceCall) -> ServiceResponse:
"""User a transformation item on a player character."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
ITEMID_MAP = {
"snowball": {"itemId": "snowball"},
"spooky_sparkles": {"itemId": "spookySparkles"},
"seafoam": {"itemId": "seafoam"},
"shiny_seed": {"itemId": "shinySeed"},
}
item = ITEMID_MAP[call.data[ATTR_ITEM]]
# check if target is self
if call.data[ATTR_TARGET] in (
coordinator.data.user["id"],
coordinator.data.user["profile"]["name"],
coordinator.data.user["auth"]["local"]["username"],
str(coordinator.data.user.id),
coordinator.data.user.profile.name,
coordinator.data.user.auth.local.username,
):
target_id = coordinator.data.user["id"]
target_id = coordinator.data.user.id
else:
# check if target is a party member
try:
party = await coordinator.api.groups.party.members.get()
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.NOT_FOUND:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="party_not_found",
) from e
party = await coordinator.habitica.get_group_members(public_fields=True)
except NotFoundError as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="party_not_found",
) from e
except (ClientError, HabiticaException) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
try:
target_id = next(
member["id"]
for member in party
if call.data[ATTR_TARGET].lower()
member.id
for member in party.data
if member.id
and call.data[ATTR_TARGET].lower()
in (
member["id"],
member["auth"]["local"]["username"].lower(),
member["profile"]["name"].lower(),
str(member.id),
str(member.auth.local.username).lower(),
str(member.profile.name).lower(),
)
)
except StopIteration as e:
@@ -360,27 +383,25 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
translation_placeholders={"target": f"'{call.data[ATTR_TARGET]}'"},
) from e
try:
response: dict[str, Any] = await coordinator.api.user.class_.cast[
ITEMID_MAP[call.data[ATTR_ITEM]]["itemId"]
].post(targetId=target_id)
except ClientResponseError as e:
if e.status == HTTPStatus.TOO_MANY_REQUESTS:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
if e.status == HTTPStatus.UNAUTHORIZED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="item_not_found",
translation_placeholders={"item": call.data[ATTR_ITEM]},
) from e
response = await coordinator.habitica.cast_skill(item, target_id)
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="setup_rate_limit_exception",
) from e
except NotAuthorizedError as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="item_not_found",
translation_placeholders={"item": call.data[ATTR_ITEM]},
) from e
except (HabiticaException, ClientError) as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="service_call_exception",
) from e
else:
return response
return asdict(response.data)
hass.services.async_register(
DOMAIN,

View File

@@ -10,12 +10,15 @@
},
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"unique_id_mismatch": "Hmm, those login details are correct, but they're not for this adventurer. Got another account to try?",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
"unknown": "[%key:common::config_flow::error::unknown%]",
"invalid_credentials": "Input is incomplete. You must provide either your login details or an API token"
},
"step": {
"user": {
@@ -49,9 +52,38 @@
"data_description": {
"url": "URL of the Habitica installation to connect to. Defaults to `{default_url}`",
"api_user": "User ID of your Habitica account",
"api_key": "API Token of the Habitica account"
"api_key": "API Token of the Habitica account",
"verify_ssl": "Enable SSL certificate verification for secure connections. Disable only if connecting to a Habitica instance using a self-signed certificate"
},
"description": "You can retrieve your `User ID` and `API Token` from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to"
},
"reauth_confirm": {
"title": "Re-authorize {name} with Habitica",
"description": "![Habiticans]({habiticans}) It seems your API token for **{name}** has been reset. To re-authorize the integration, you can either log in with your username or email, and password, or directly provide your new API token.",
"sections": {
"reauth_login": {
"name": "Re-authorize via login",
"description": "Enter your login details below to re-authorize the Home Assistant integration with Habitica",
"data": {
"username": "[%key:component::habitica::config::step::login::data::username%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"username": "[%key:component::habitica::config::step::login::data_description::username%]",
"password": "[%key:component::habitica::config::step::login::data_description::password%]"
}
},
"reauth_api_key": {
"description": "Enter your new API token below. You can find it in Habitica under 'Settings -> Site Data'",
"name": "Re-authorize via API Token",
"data": {
"api_key": "[%key:component::habitica::config::step::advanced::data::api_key%]"
},
"data_description": {
"api_key": "[%key:component::habitica::config::step::advanced::data_description::api_key%]"
}
}
}
}
}
},
@@ -365,6 +397,9 @@
},
"item_not_found": {
"message": "Unable to use {item}, you don't own this item."
},
"authentication_failed": {
"message": "Authentication failed. It looks like your API token has been reset. Please re-authenticate using your new token"
}
},
"issues": {

View File

@@ -28,7 +28,7 @@ class HabiticaSwitchEntityDescription(SwitchEntityDescription):
turn_on_fn: Callable[[HabiticaDataUpdateCoordinator], Any]
turn_off_fn: Callable[[HabiticaDataUpdateCoordinator], Any]
is_on_fn: Callable[[HabiticaData], bool]
is_on_fn: Callable[[HabiticaData], bool | None]
class HabiticaSwitchEntity(StrEnum):
@@ -42,9 +42,9 @@ SWTICH_DESCRIPTIONS: tuple[HabiticaSwitchEntityDescription, ...] = (
key=HabiticaSwitchEntity.SLEEP,
translation_key=HabiticaSwitchEntity.SLEEP,
device_class=SwitchDeviceClass.SWITCH,
turn_on_fn=lambda coordinator: coordinator.api["user"]["sleep"].post(),
turn_off_fn=lambda coordinator: coordinator.api["user"]["sleep"].post(),
is_on_fn=lambda data: data.user["preferences"]["sleep"],
turn_on_fn=lambda coordinator: coordinator.habitica.toggle_sleep(),
turn_off_fn=lambda coordinator: coordinator.habitica.toggle_sleep(),
is_on_fn=lambda data: data.user.preferences.sleep,
),
)

View File

@@ -2,11 +2,12 @@
from __future__ import annotations
import datetime
from enum import StrEnum
from typing import TYPE_CHECKING
from uuid import UUID
from aiohttp import ClientResponseError
from aiohttp import ClientError
from habiticalib import Direction, HabiticaException, Task, TaskType
from homeassistant.components import persistent_notification
from homeassistant.components.todo import (
@@ -24,7 +25,7 @@ from homeassistant.util import dt as dt_util
from .const import ASSETS_URL, DOMAIN
from .coordinator import HabiticaDataUpdateCoordinator
from .entity import HabiticaBase
from .types import HabiticaConfigEntry, HabiticaTaskType
from .types import HabiticaConfigEntry
from .util import next_due_date
PARALLEL_UPDATES = 1
@@ -70,8 +71,8 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
"""Delete Habitica tasks."""
if len(uids) > 1 and self.entity_description.key is HabiticaTodoList.TODOS:
try:
await self.coordinator.api.tasks.clearCompletedTodos.post()
except ClientResponseError as e:
await self.coordinator.habitica.delete_completed_todos()
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="delete_completed_todos_failed",
@@ -79,8 +80,8 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
else:
for task_id in uids:
try:
await self.coordinator.api.tasks[task_id].delete()
except ClientResponseError as e:
await self.coordinator.habitica.delete_task(UUID(task_id))
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key=f"delete_{self.entity_description.key}_failed",
@@ -106,9 +107,8 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
pos = 0
try:
await self.coordinator.api.tasks[uid].move.to[str(pos)].post()
except ClientResponseError as e:
await self.coordinator.habitica.reorder_task(UUID(uid), pos)
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key=f"move_{self.entity_description.key}_item_failed",
@@ -118,12 +118,14 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
# move tasks in the coordinator until we have fresh data
tasks = self.coordinator.data.tasks
new_pos = (
tasks.index(next(task for task in tasks if task["id"] == previous_uid))
tasks.index(
next(task for task in tasks if task.id == UUID(previous_uid))
)
+ 1
if previous_uid
else 0
)
old_pos = tasks.index(next(task for task in tasks if task["id"] == uid))
old_pos = tasks.index(next(task for task in tasks if task.id == UUID(uid)))
tasks.insert(new_pos, tasks.pop(old_pos))
await self.coordinator.async_request_refresh()
@@ -138,14 +140,17 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
if TYPE_CHECKING:
assert item.uid
assert current_item
assert item.summary
task = Task(
text=item.summary,
notes=item.description or "",
)
if (
self.entity_description.key is HabiticaTodoList.TODOS
and item.due is not None
): # Only todos support a due date.
date = item.due.isoformat()
else:
date = None
task["date"] = item.due
if (
item.summary != current_item.summary
@@ -153,13 +158,9 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
or item.due != current_item.due
):
try:
await self.coordinator.api.tasks[item.uid].put(
text=item.summary,
notes=item.description or "",
date=date,
)
await self.coordinator.habitica.update_task(UUID(item.uid), task)
refresh_required = True
except ClientResponseError as e:
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key=f"update_{self.entity_description.key}_item_failed",
@@ -172,32 +173,33 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity):
current_item.status is TodoItemStatus.NEEDS_ACTION
and item.status == TodoItemStatus.COMPLETED
):
score_result = (
await self.coordinator.api.tasks[item.uid].score["up"].post()
score_result = await self.coordinator.habitica.update_score(
UUID(item.uid), Direction.UP
)
refresh_required = True
elif (
current_item.status is TodoItemStatus.COMPLETED
and item.status == TodoItemStatus.NEEDS_ACTION
):
score_result = (
await self.coordinator.api.tasks[item.uid].score["down"].post()
score_result = await self.coordinator.habitica.update_score(
UUID(item.uid), Direction.DOWN
)
refresh_required = True
else:
score_result = None
except ClientResponseError as e:
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key=f"score_{self.entity_description.key}_item_failed",
translation_placeholders={"name": item.summary or ""},
) from e
if score_result and (drop := score_result.get("_tmp", {}).get("drop", False)):
if score_result and score_result.data.tmp.drop.key:
drop = score_result.data.tmp.drop
msg = (
f"![{drop["key"]}]({ASSETS_URL}Pet_{drop["type"]}_{drop["key"]}.png)\n"
f"{drop["dialog"]}"
f"![{drop.key}]({ASSETS_URL}Pet_{drop.Type}_{drop.key}.png)\n"
f"{drop.dialog}"
)
persistent_notification.async_create(
self.hass, message=msg, title="Habitica"
@@ -229,38 +231,36 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity):
return [
*(
TodoItem(
uid=task["id"],
summary=task["text"],
description=task["notes"],
due=(
dt_util.as_local(
datetime.datetime.fromisoformat(task["date"])
).date()
if task.get("date")
else None
),
uid=str(task.id),
summary=task.text,
description=task.notes,
due=dt_util.as_local(task.date).date() if task.date else None,
status=(
TodoItemStatus.NEEDS_ACTION
if not task["completed"]
if not task.completed
else TodoItemStatus.COMPLETED
),
)
for task in self.coordinator.data.tasks
if task["type"] == HabiticaTaskType.TODO
if task.Type is TaskType.TODO
),
]
async def async_create_todo_item(self, item: TodoItem) -> None:
"""Create a Habitica todo."""
if TYPE_CHECKING:
assert item.summary
assert item.description
try:
await self.coordinator.api.tasks.user.post(
text=item.summary,
type=HabiticaTaskType.TODO,
notes=item.description,
date=item.due.isoformat() if item.due else None,
await self.coordinator.habitica.create_task(
Task(
text=item.summary,
type=TaskType.TODO,
notes=item.description,
date=item.due,
)
)
except ClientResponseError as e:
except (HabiticaException, ClientError) as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key=f"create_{self.entity_description.key}_item_failed",
@@ -295,23 +295,23 @@ class HabiticaDailiesListEntity(BaseHabiticaListEntity):
that have been completed but forgotten to mark as completed before resetting the dailies.
Changes of the date input field in Home Assistant will be ignored.
"""
last_cron = self.coordinator.data.user["lastCron"]
if TYPE_CHECKING:
assert self.coordinator.data.user.lastCron
return [
*(
TodoItem(
uid=task["id"],
summary=task["text"],
description=task["notes"],
due=next_due_date(task, last_cron),
uid=str(task.id),
summary=task.text,
description=task.notes,
due=next_due_date(task, self.coordinator.data.user.lastCron),
status=(
TodoItemStatus.COMPLETED
if task["completed"]
if task.completed
else TodoItemStatus.NEEDS_ACTION
),
)
for task in self.coordinator.data.tasks
if task["type"] == HabiticaTaskType.DAILY
if task.Type is TaskType.DAILY
)
]

View File

@@ -2,9 +2,10 @@
from __future__ import annotations
from dataclasses import fields
import datetime
from math import floor
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING
from dateutil.rrule import (
DAILY,
@@ -20,6 +21,7 @@ from dateutil.rrule import (
YEARLY,
rrule,
)
from habiticalib import ContentData, Frequency, TaskData, UserData
from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
@@ -27,50 +29,32 @@ from homeassistant.core import HomeAssistant
from homeassistant.util import dt as dt_util
def next_due_date(task: dict[str, Any], last_cron: str) -> datetime.date | None:
def next_due_date(task: TaskData, today: datetime.datetime) -> datetime.date | None:
"""Calculate due date for dailies and yesterdailies."""
if task["everyX"] == 0 or not task.get("nextDue"): # grey dailies never become due
if task.everyX == 0 or not task.nextDue: # grey dailies never become due
return None
today = to_date(last_cron)
startdate = to_date(task["startDate"])
if TYPE_CHECKING:
assert today
assert startdate
assert task.startDate
if task["isDue"] and not task["completed"]:
return to_date(last_cron)
if task.isDue is True and not task.completed:
return dt_util.as_local(today).date()
if startdate > today:
if task["frequency"] == "daily" or (
task["frequency"] in ("monthly", "yearly") and task["daysOfMonth"]
if task.startDate > today:
if task.frequency is Frequency.DAILY or (
task.frequency in (Frequency.MONTHLY, Frequency.YEARLY) and task.daysOfMonth
):
return startdate
return dt_util.as_local(task.startDate).date()
if (
task["frequency"] in ("weekly", "monthly")
and (nextdue := to_date(task["nextDue"][0]))
and startdate > nextdue
task.frequency in (Frequency.WEEKLY, Frequency.MONTHLY)
and (nextdue := task.nextDue[0])
and task.startDate > nextdue
):
return to_date(task["nextDue"][1])
return dt_util.as_local(task.nextDue[1]).date()
return to_date(task["nextDue"][0])
def to_date(date: str) -> datetime.date | None:
"""Convert an iso date to a datetime.date object."""
try:
return dt_util.as_local(datetime.datetime.fromisoformat(date)).date()
except ValueError:
# sometimes nextDue dates are JavaScript datetime strings instead of iso:
# "Mon May 06 2024 00:00:00 GMT+0200"
try:
return dt_util.as_local(
datetime.datetime.strptime(date, "%a %b %d %Y %H:%M:%S %Z%z")
).date()
except ValueError:
return None
return dt_util.as_local(task.nextDue[0]).date()
def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]:
@@ -84,30 +68,27 @@ FREQUENCY_MAP = {"daily": DAILY, "weekly": WEEKLY, "monthly": MONTHLY, "yearly":
WEEKDAY_MAP = {"m": MO, "t": TU, "w": WE, "th": TH, "f": FR, "s": SA, "su": SU}
def build_rrule(task: dict[str, Any]) -> rrule:
def build_rrule(task: TaskData) -> rrule:
"""Build rrule string."""
rrule_frequency = FREQUENCY_MAP.get(task["frequency"], DAILY)
weekdays = [
WEEKDAY_MAP[day] for day, is_active in task["repeat"].items() if is_active
]
if TYPE_CHECKING:
assert task.frequency
assert task.everyX
rrule_frequency = FREQUENCY_MAP.get(task.frequency, DAILY)
weekdays = [day for key, day in WEEKDAY_MAP.items() if getattr(task.repeat, key)]
bymonthday = (
task["daysOfMonth"]
if rrule_frequency == MONTHLY and task["daysOfMonth"]
else None
task.daysOfMonth if rrule_frequency == MONTHLY and task.daysOfMonth else None
)
bysetpos = None
if rrule_frequency == MONTHLY and task["weeksOfMonth"]:
bysetpos = task["weeksOfMonth"]
if rrule_frequency == MONTHLY and task.weeksOfMonth:
bysetpos = task.weeksOfMonth
weekdays = weekdays if weekdays else [MO]
return rrule(
freq=rrule_frequency,
interval=task["everyX"],
dtstart=dt_util.start_of_local_day(
datetime.datetime.fromisoformat(task["startDate"])
),
interval=task.everyX,
dtstart=dt_util.start_of_local_day(task.startDate),
byweekday=weekdays if rrule_frequency in [WEEKLY, MONTHLY] else None,
bymonthday=bymonthday,
bysetpos=bysetpos,
@@ -143,48 +124,37 @@ def get_recurrence_rule(recurrence: rrule) -> str:
def get_attribute_points(
user: dict[str, Any], content: dict[str, Any], attribute: str
user: UserData, content: ContentData, attribute: str
) -> dict[str, float]:
"""Get modifiers contributing to strength attribute."""
gear_set = {
"weapon",
"armor",
"head",
"shield",
"back",
"headAccessory",
"eyewear",
"body",
}
"""Get modifiers contributing to STR/INT/CON/PER attributes."""
equipment = sum(
stats[attribute]
for gear in gear_set
if (equipped := user["items"]["gear"]["equipped"].get(gear))
and (stats := content["gear"]["flat"].get(equipped))
getattr(stats, attribute)
for gear in fields(user.items.gear.equipped)
if (equipped := getattr(user.items.gear.equipped, gear.name))
and (stats := content.gear.flat[equipped])
)
class_bonus = sum(
stats[attribute] / 2
for gear in gear_set
if (equipped := user["items"]["gear"]["equipped"].get(gear))
and (stats := content["gear"]["flat"].get(equipped))
and stats["klass"] == user["stats"]["class"]
getattr(stats, attribute) / 2
for gear in fields(user.items.gear.equipped)
if (equipped := getattr(user.items.gear.equipped, gear.name))
and (stats := content.gear.flat[equipped])
and stats.klass == user.stats.Class
)
if TYPE_CHECKING:
assert user.stats.lvl
return {
"level": min(floor(user["stats"]["lvl"] / 2), 50),
"level": min(floor(user.stats.lvl / 2), 50),
"equipment": equipment,
"class": class_bonus,
"allocated": user["stats"][attribute],
"buffs": user["stats"]["buffs"][attribute],
"allocated": getattr(user.stats, attribute),
"buffs": getattr(user.stats.buffs, attribute),
}
def get_attributes_total(
user: dict[str, Any], content: dict[str, Any], attribute: str
) -> int:
def get_attributes_total(user: UserData, content: ContentData, attribute: str) -> int:
"""Get total attribute points."""
return floor(
sum(value for value in get_attribute_points(user, content, attribute).values())
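The reworked `build_rrule` keeps the same frequency and weekday mapping, it just reads attributes instead of dictionary keys. As a small illustration of that mapping with `python-dateutil`, here is a sketch that passes the relevant fields in directly instead of a habiticalib `TaskData` object; the helper name and sample values are assumptions:

```python
"""Sketch: build a dateutil recurrence rule from Habitica-style repeat data.

Assumption: the task fields (frequency, everyX, repeat, startDate) are passed
in directly instead of a habiticalib TaskData object; sample values are made up.
"""
from datetime import datetime

from dateutil.rrule import DAILY, FR, MO, MONTHLY, SA, SU, TH, TU, WE, WEEKLY, YEARLY, rrule

FREQUENCY_MAP = {"daily": DAILY, "weekly": WEEKLY, "monthly": MONTHLY, "yearly": YEARLY}
WEEKDAY_MAP = {"m": MO, "t": TU, "w": WE, "th": TH, "f": FR, "s": SA, "su": SU}


def build_rule(frequency: str, every_x: int, repeat: dict[str, bool], start: datetime) -> rrule:
    """Return the recurrence rule for a Habitica daily."""
    freq = FREQUENCY_MAP.get(frequency, DAILY)
    weekdays = [day for key, day in WEEKDAY_MAP.items() if repeat.get(key)] or [MO]
    return rrule(
        freq=freq,
        interval=every_x,
        dtstart=start,
        byweekday=weekdays if freq in (WEEKLY, MONTHLY) else None,
    )


# A daily repeating every week on Monday and Thursday, started on 2025-01-01:
rule = build_rule("weekly", 1, {"m": True, "th": True}, datetime(2025, 1, 1))
print(rule.after(datetime(2025, 1, 1)))  # 2025-01-02 00:00:00 (the next Thursday)
```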

View File

@@ -0,0 +1 @@
"""Virtual integration: Harvey."""

View File

@@ -0,0 +1,6 @@
{
"domain": "harvey",
"name": "Harvey",
"integration_type": "virtual",
"supported_by": "aquacell"
}

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass
from datetime import timedelta
import logging
from pyheos import Heos, HeosError, HeosPlayer, const as heos_const
from pyheos import Heos, HeosError, HeosOptions, HeosPlayer, const as heos_const
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform
@@ -58,9 +58,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: HeosConfigEntry) -> bool
host = entry.data[CONF_HOST]
# Setting all_progress_events=False ensures that we only receive a
# media position update upon start of playback or when media changes
controller = Heos(host, all_progress_events=False)
controller = Heos(HeosOptions(host, all_progress_events=False, auto_reconnect=True))
try:
await controller.connect(auto_reconnect=True)
await controller.connect()
# Auto reconnect only operates if initial connection was successful.
except HeosError as error:
await controller.disconnect()
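For context, the pyheos 0.8.0 change moves connection options into a `HeosOptions` object and drops the `auto_reconnect` argument from `connect()`. A sketch of the new connection sequence, based only on the calls visible in this diff; the host address in the usage note is a placeholder:

```python
"""Sketch: connecting with pyheos 0.8.0 using HeosOptions.

Based only on the calls visible in this diff; other HeosOptions parameters
are not assumed.
"""
from pyheos import Heos, HeosError, HeosOptions


async def connect(host: str) -> Heos:
    """Connect to a HEOS device with the options-object style introduced in 0.8.0."""
    # Options that used to be passed to Heos() and connect() now live in HeosOptions.
    controller = Heos(HeosOptions(host, all_progress_events=False, auto_reconnect=True))
    try:
        await controller.connect()  # auto-reconnect applies once the first connect succeeds
    except HeosError:
        await controller.disconnect()
        raise
    return controller


# Usage (inside a running event loop):
#     controller = await connect("192.0.2.10")
```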

View File

@@ -3,7 +3,7 @@
from typing import TYPE_CHECKING, Any
from urllib.parse import urlparse
from pyheos import Heos, HeosError
from pyheos import Heos, HeosError, HeosOptions
import voluptuous as vol
from homeassistant.components import ssdp
@@ -20,7 +20,7 @@ def format_title(host: str) -> str:
async def _validate_host(host: str, errors: dict[str, str]) -> bool:
"""Validate host is reachable, return True, otherwise populate errors and return False."""
heos = Heos(host)
heos = Heos(HeosOptions(host, events=False, heart_beat=False))
try:
await heos.connect()
except HeosError:

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/heos",
"iot_class": "local_push",
"loggers": ["pyheos"],
"requirements": ["pyheos==0.7.2"],
"requirements": ["pyheos==0.8.0"],
"single_config_entry": true,
"ssdp": [
{

View File

@@ -118,9 +118,7 @@ class HistoryStats:
<= current_period_end_timestamp
):
self._history_current_period.append(
HistoryState(
new_state.state, new_state.last_changed.timestamp()
)
HistoryState(new_state.state, new_state.last_changed_timestamp)
)
new_data = True
if not new_data and current_period_end_timestamp < now_timestamp:
@@ -131,6 +129,16 @@ class HistoryStats:
await self._async_history_from_db(
current_period_start_timestamp, current_period_end_timestamp
)
if event and (new_state := event.data["new_state"]) is not None:
if (
current_period_start_timestamp
<= floored_timestamp(new_state.last_changed)
<= current_period_end_timestamp
):
self._history_current_period.append(
HistoryState(new_state.state, new_state.last_changed_timestamp)
)
self._previous_run_before_start = False
seconds_matched, match_count = self._async_compute_seconds_and_changes(
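The history_stats change stores precomputed float timestamps and appends only states whose last-changed time falls inside the tracked period. A tiny sketch of that inclusive bounds check; `HistoryState` is reduced to a plain dataclass here:

```python
"""Sketch: keep only states whose last-changed timestamp falls inside the
tracked period, using precomputed float timestamps as the change above does.

Assumption: HistoryState is reduced to a simple dataclass.
"""
from dataclasses import dataclass


@dataclass
class HistoryState:
    state: str
    last_changed: float  # Unix timestamp, like State.last_changed_timestamp


def states_in_period(states: list[HistoryState], start: float, end: float) -> list[HistoryState]:
    """Inclusive bounds check, mirroring start <= timestamp <= end in the diff."""
    return [s for s in states if start <= s.last_changed <= end]


sample = [HistoryState("on", 1_700_000_100.0), HistoryState("off", 1_700_000_900.0)]
print(states_in_period(sample, 1_700_000_000.0, 1_700_000_500.0))  # only the "on" state
```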

View File

@@ -113,12 +113,17 @@ class HiveBinarySensorEntity(HiveEntity, BinarySensorEntity):
await self.hive.session.updateData(self.device)
self.device = await self.hive.sensor.getSensor(self.device)
self.attributes = self.device.get("attributes", {})
self._attr_is_on = self.device["status"]["state"]
if self.device["hiveType"] != "Connectivity":
self._attr_available = self.device["deviceData"].get("online")
self._attr_available = (
self.device["deviceData"].get("online") and "status" in self.device
)
else:
self._attr_available = True
if self._attr_available:
self._attr_is_on = self.device["status"].get("state")
class HiveSensorEntity(HiveEntity, BinarySensorEntity):
"""Hive Sensor Entity."""

View File

@@ -12,5 +12,6 @@
"documentation": "https://www.home-assistant.io/integrations/idasen_desk",
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "bronze",
"requirements": ["idasen-ha==2.6.3"]
}

View File

@@ -17,9 +17,9 @@ rules:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: todo
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: todo
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done

View File

@@ -20,6 +20,8 @@ from . import InComfortConfigEntry
from .coordinator import InComfortDataCoordinator
from .entity import IncomfortBoilerEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IncomfortBinarySensorEntityDescription(BinarySensorEntityDescription):

View File

@@ -22,6 +22,8 @@ from .const import DOMAIN
from .coordinator import InComfortDataCoordinator
from .entity import IncomfortEntity
PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -22,6 +22,8 @@ from . import InComfortConfigEntry
from .coordinator import InComfortDataCoordinator
from .entity import IncomfortBoilerEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IncomfortSensorEntityDescription(SensorEntityDescription):

View File

@@ -20,6 +20,8 @@ _LOGGER = logging.getLogger(__name__)
HEATER_ATTRS = ["display_code", "display_text", "is_burning"]
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistant,

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ipma",
"iot_class": "cloud_polling",
"loggers": ["geopy", "pyipma"],
"requirements": ["pyipma==3.0.7"]
"requirements": ["pyipma==3.0.8"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pyiqvia"],
"requirements": ["numpy==2.2.0", "pyiqvia==2022.04.0"]
"requirements": ["numpy==2.2.1", "pyiqvia==2022.04.0"]
}

View File

@@ -27,9 +27,11 @@ from .coordinator import (
PLATFORMS: list[Platform] = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
Platform.UPDATE,
]

View File

@@ -0,0 +1,85 @@
"""Button platform for IronOS integration."""
from __future__ import annotations
from dataclasses import dataclass
from enum import StrEnum
from pynecil import CharSetting
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import IronOSConfigEntry
from .coordinator import IronOSCoordinators
from .entity import IronOSBaseEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IronOSButtonEntityDescription(ButtonEntityDescription):
"""Describes IronOS button entity."""
characteristic: CharSetting
class IronOSButton(StrEnum):
"""Button controls for IronOS device."""
SETTINGS_RESET = "settings_reset"
SETTINGS_SAVE = "settings_save"
BUTTON_DESCRIPTIONS: tuple[IronOSButtonEntityDescription, ...] = (
IronOSButtonEntityDescription(
key=IronOSButton.SETTINGS_RESET,
translation_key=IronOSButton.SETTINGS_RESET,
characteristic=CharSetting.SETTINGS_RESET,
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
IronOSButtonEntityDescription(
key=IronOSButton.SETTINGS_SAVE,
translation_key=IronOSButton.SETTINGS_SAVE,
characteristic=CharSetting.SETTINGS_SAVE,
entity_category=EntityCategory.CONFIG,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: IronOSConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up button entities from a config entry."""
coordinators = entry.runtime_data
async_add_entities(
IronOSButtonEntity(coordinators, description)
for description in BUTTON_DESCRIPTIONS
)
class IronOSButtonEntity(IronOSBaseEntity, ButtonEntity):
"""Implementation of a IronOS button entity."""
entity_description: IronOSButtonEntityDescription
def __init__(
self,
coordinators: IronOSCoordinators,
entity_description: IronOSButtonEntityDescription,
) -> None:
"""Initialize the select entity."""
super().__init__(coordinators.live_data, entity_description)
self.settings = coordinators.settings
async def async_press(self) -> None:
"""Handle the button press."""
await self.settings.write(self.entity_description.characteristic, True)

View File

@@ -5,8 +5,10 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import cast
from pynecil import (
CharSetting,
CommunicationError,
DeviceInfoResponse,
IronOSUpdate,
@@ -19,6 +21,7 @@ from pynecil import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -147,3 +150,21 @@ class IronOSSettingsCoordinator(IronOSBaseCoordinator[SettingsDataResponse]):
_LOGGER.debug("Failed to fetch settings", exc_info=e)
return self.data or SettingsDataResponse()
async def write(self, characteristic: CharSetting, value: bool) -> None:
"""Write value to the settings characteristic."""
try:
await self.device.write(characteristic, value)
except CommunicationError as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="submit_setting_failed",
) from e
# prevent switch bouncing while waiting for coordinator to finish refresh
self.data.update(
cast(SettingsDataResponse, {characteristic.name.lower(): value})
)
self.async_update_listeners()
await self.async_request_refresh()
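The new `write()` helper applies an optimistic update: it writes the setting to the device, patches the cached data, notifies listeners immediately, and only then asks for a refresh, so the switch does not bounce back while the refresh is pending. A stand-alone sketch of that pattern with the coordinator machinery stripped away; class and method names here are illustrative:

```python
"""Sketch: the optimistic-update pattern used by IronOSSettingsCoordinator.write().

Assumption: coordinator internals are reduced to a plain class; the real code
uses DataUpdateCoordinator, CharSetting and SettingsDataResponse from pynecil.
"""
import asyncio
from collections.abc import Callable


class SettingsCache:
    """Minimal stand-in for the settings coordinator."""

    def __init__(self) -> None:
        self.data: dict[str, bool] = {}
        self._listeners: list[Callable[[], None]] = []

    def add_listener(self, callback: Callable[[], None]) -> None:
        self._listeners.append(callback)

    async def _write_to_device(self, setting: str, value: bool) -> None:
        print(f"device write: {setting}={value}")  # may raise on Bluetooth errors

    async def write(self, setting: str, value: bool) -> None:
        await self._write_to_device(setting, value)
        self.data[setting] = value        # update the cache right away ...
        for callback in self._listeners:  # ... and push it to entities so the switch
            callback()                    # does not bounce back to its old state
        # the real coordinator then calls async_request_refresh()


cache = SettingsCache()
cache.add_listener(lambda: print("entities updated"))
asyncio.run(cache.write("animation_loop", True))
```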

View File

@@ -8,6 +8,14 @@
}
}
},
"button": {
"settings_save": {
"default": "mdi:content-save-cog"
},
"settings_reset": {
"default": "mdi:refresh"
}
},
"number": {
"setpoint_temperature": {
"default": "mdi:thermometer"
@@ -149,6 +157,44 @@
"estimated_power": {
"default": "mdi:flash"
}
},
"switch": {
"animation_loop": {
"default": "mdi:play-box",
"state": {
"on": "mdi:animation-play"
}
},
"calibrate_cjc": {
"default": "mdi:tune-vertical"
},
"cooling_temp_blink": {
"default": "mdi:alarm-light-outline",
"state": {
"off": "mdi:alarm-light-off-outline"
}
},
"display_invert": {
"default": "mdi:invert-colors"
},
"invert_buttons": {
"default": "mdi:plus-minus-variant"
},
"usb_pd_mode": {
"default": "mdi:meter-electric-outline"
},
"idle_screen_details": {
"default": "mdi:card-bulleted-outline",
"state": {
"off": "mdi:card-bulleted-off-outline"
}
},
"solder_screen_details": {
"default": "mdi:card-bulleted-outline",
"state": {
"off": "mdi:card-bulleted-off-outline"
}
}
}
}
}

View File

@@ -13,5 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/iron_os",
"iot_class": "local_polling",
"loggers": ["pynecil"],
"requirements": ["pynecil==2.1.0"]
"requirements": ["pynecil==3.0.1"]
}

View File

@@ -26,9 +26,7 @@ rules:
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not have actions
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt

View File

@@ -29,6 +29,14 @@
"name": "Soldering tip"
}
},
"button": {
"settings_save": {
"name": "Save settings"
},
"settings_reset": {
"name": "Restore default settings"
}
},
"number": {
"setpoint_temperature": {
"name": "Setpoint temperature"
@@ -214,6 +222,32 @@
"estimated_power": {
"name": "Estimated power"
}
},
"switch": {
"animation_loop": {
"name": "Animation loop"
},
"cooling_temp_blink": {
"name": "Cool down screen flashing"
},
"idle_screen_details": {
"name": "Detailed idle screen"
},
"solder_screen_details": {
"name": "Detailed solder screen"
},
"invert_buttons": {
"name": "Swap +/- buttons"
},
"display_invert": {
"name": "Invert screen"
},
"calibrate_cjc": {
"name": "Calibrate CJC"
},
"usb_pd_mode": {
"name": "Power Delivery 3.1 EPR"
}
}
},
"exceptions": {

View File

@@ -0,0 +1,163 @@
"""Switch platform for IronOS integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from typing import Any
from pynecil import CharSetting, SettingsDataResponse
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import IronOSConfigEntry
from .coordinator import IronOSCoordinators
from .entity import IronOSBaseEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IronOSSwitchEntityDescription(SwitchEntityDescription):
"""Describes IronOS switch entity."""
is_on_fn: Callable[[SettingsDataResponse], bool | None]
characteristic: CharSetting
class IronOSSwitch(StrEnum):
"""Switch controls for IronOS device."""
ANIMATION_LOOP = "animation_loop"
COOLING_TEMP_BLINK = "cooling_temp_blink"
IDLE_SCREEN_DETAILS = "idle_screen_details"
SOLDER_SCREEN_DETAILS = "solder_screen_details"
INVERT_BUTTONS = "invert_buttons"
DISPLAY_INVERT = "display_invert"
CALIBRATE_CJC = "calibrate_cjc"
USB_PD_MODE = "usb_pd_mode"
SWITCH_DESCRIPTIONS: tuple[IronOSSwitchEntityDescription, ...] = (
IronOSSwitchEntityDescription(
key=IronOSSwitch.ANIMATION_LOOP,
translation_key=IronOSSwitch.ANIMATION_LOOP,
characteristic=CharSetting.ANIMATION_LOOP,
is_on_fn=lambda x: x.get("animation_loop"),
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.COOLING_TEMP_BLINK,
translation_key=IronOSSwitch.COOLING_TEMP_BLINK,
characteristic=CharSetting.COOLING_TEMP_BLINK,
is_on_fn=lambda x: x.get("cooling_temp_blink"),
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.IDLE_SCREEN_DETAILS,
translation_key=IronOSSwitch.IDLE_SCREEN_DETAILS,
characteristic=CharSetting.IDLE_SCREEN_DETAILS,
is_on_fn=lambda x: x.get("idle_screen_details"),
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.SOLDER_SCREEN_DETAILS,
translation_key=IronOSSwitch.SOLDER_SCREEN_DETAILS,
characteristic=CharSetting.SOLDER_SCREEN_DETAILS,
is_on_fn=lambda x: x.get("solder_screen_details"),
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.INVERT_BUTTONS,
translation_key=IronOSSwitch.INVERT_BUTTONS,
characteristic=CharSetting.INVERT_BUTTONS,
is_on_fn=lambda x: x.get("invert_buttons"),
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.DISPLAY_INVERT,
translation_key=IronOSSwitch.DISPLAY_INVERT,
characteristic=CharSetting.DISPLAY_INVERT,
is_on_fn=lambda x: x.get("display_invert"),
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.CALIBRATE_CJC,
translation_key=IronOSSwitch.CALIBRATE_CJC,
characteristic=CharSetting.CALIBRATE_CJC,
is_on_fn=lambda x: x.get("calibrate_cjc"),
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
IronOSSwitchEntityDescription(
key=IronOSSwitch.USB_PD_MODE,
translation_key=IronOSSwitch.USB_PD_MODE,
characteristic=CharSetting.USB_PD_MODE,
is_on_fn=lambda x: x.get("usb_pd_mode"),
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: IronOSConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up switches from a config entry."""
coordinators = entry.runtime_data
async_add_entities(
IronOSSwitchEntity(coordinators, description)
for description in SWITCH_DESCRIPTIONS
)
class IronOSSwitchEntity(IronOSBaseEntity, SwitchEntity):
"""Representation of a IronOS Switch."""
entity_description: IronOSSwitchEntityDescription
def __init__(
self,
coordinators: IronOSCoordinators,
entity_description: IronOSSwitchEntityDescription,
) -> None:
"""Initialize the switch entity."""
super().__init__(coordinators.live_data, entity_description)
self.settings = coordinators.settings
@property
def is_on(self) -> bool | None:
"""Return the state of the device."""
return self.entity_description.is_on_fn(
self.settings.data,
)
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self.settings.write(self.entity_description.characteristic, True)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self.settings.write(self.entity_description.characteristic, False)
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_added_to_hass()
self.async_on_remove(
self.settings.async_add_listener(
self._handle_coordinator_update, self.entity_description.characteristic
)
)
await self.settings.async_request_refresh()
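Each switch description carries an `is_on_fn` callable that pulls its value out of the settings response, so a setting that has not been read yet simply reports an unknown state. A reduced sketch of that lookup pattern, treating `SettingsDataResponse` as a plain dict:

```python
"""Sketch: how the switch entities read their state through is_on_fn.

Assumption: SettingsDataResponse is treated as a plain dict and the entity and
coordinator plumbing is omitted.
"""
from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    key: str
    is_on_fn: Callable[[dict], bool | None]


DESCRIPTIONS = (
    SwitchDescription(key="animation_loop", is_on_fn=lambda s: s.get("animation_loop")),
    SwitchDescription(key="invert_buttons", is_on_fn=lambda s: s.get("invert_buttons")),
)

settings = {"animation_loop": True}  # "invert_buttons" not read yet -> reported as None/unknown
for description in DESCRIPTIONS:
    print(description.key, description.is_on_fn(settings))
```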

View File

@@ -6,5 +6,5 @@
"iot_class": "local_polling",
"loggers": ["keba_kecontact"],
"quality_scale": "legacy",
"requirements": ["keba-kecontact==1.1.0"]
"requirements": ["keba-kecontact==1.3.0"]
}

View File

@@ -427,7 +427,7 @@ class KNXClimate(KnxYamlEntity, ClimateEntity):
self._device.mode.xknx.devices.async_remove(self._device.mode)
await super().async_will_remove_from_hass()
def after_update_callback(self, _device: XknxDevice) -> None:
def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
if self._device.mode is not None and self._device.mode.supports_controller_mode:
hvac_mode = CONTROLLER_MODES.get(
@@ -435,4 +435,4 @@ class KNXClimate(KnxYamlEntity, ClimateEntity):
)
if hvac_mode is not HVACMode.OFF:
self._last_hvac_mode = hvac_mode
super().after_update_callback(_device)
super().after_update_callback(device)

View File

@@ -69,7 +69,7 @@ class _KnxEntityBase(Entity):
"""Request a state update from KNX bus."""
await self._device.sync()
def after_update_callback(self, _device: XknxDevice) -> None:
def after_update_callback(self, device: XknxDevice) -> None:
"""Call after device was updated."""
self.async_write_ha_state()

View File

@@ -12,7 +12,7 @@
"requirements": [
"xknx==3.4.0",
"xknxproject==3.8.1",
"knx-frontend==2024.11.16.205004"
"knx-frontend==2024.12.26.233449"
],
"single_config_entry": true
}

View File

@@ -0,0 +1,117 @@
rules:
# Bronze
action-setup: done
appropriate-polling:
status: exempt
comment: |
This integration is push-based.
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: todo
entity-event-setup: done
entity-unique-id: done
has-entity-name:
status: exempt
comment: |
YAML entities don't support devices. UI entities do and use `has_entity_name`.
runtime-data:
status: exempt
comment: |
KNXModule is needed in places where no config_entry handle is available:
device_trigger, services, websocket
test-before-configure:
status: exempt
comment: |
For automatic connection modes, there has already been successful communication
with the KNX interface at the discovery process.
For manual tunneling, we avoid making short-lived connections since there seem to be
interfaces that have trouble with that.
For routing, the protocol doesn't provide any means to test since it is connectionless multicast.
test-before-setup: done
unique-config-entry:
status: done
comment: Single config entry.
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable:
status: done
comment: |
The library logs when the connection is lost / reconnected. Individual entities don't.
parallel-updates:
status: exempt
comment: |
Integration is push based.
reauthentication-flow:
status: exempt
comment: |
Integration has no authentication.
test-coverage: done
# Gold
devices:
status: exempt
comment: |
YAML entities don't support devices. UI entities support user-defined devices.
diagnostics: done
discovery-update-info: todo
discovery:
status: exempt
comment: |
KNX doesn't support any provided discovery method.
docs-data-update: todo
docs-examples: done
docs-known-limitations: todo
docs-supported-devices:
status: exempt
comment: |
Devices aren't supported directly since communication is on group address level.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
Devices aren't discoverable in KNX.
entity-category:
status: exempt
comment: |
Entity category can be configured by the user.
entity-device-class:
status: exempt
comment: |
Device class can be configured by the user. Proper defaults are determined by the configured DPT.
entity-disabled-by-default:
status: exempt
comment: |
Since all entities are configured manually, they are enabled by default.
entity-translations:
status: exempt
comment: |
Since all entities are configured manually, names are user-defined.
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices:
status: exempt
comment: |
Devices aren't discoverable in KNX. Manual device removal is implemented.
# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: |
No HTTP is used.
strict-typing: done

View File

@@ -211,7 +211,7 @@ class KNXSystemSensor(SensorEntity):
return True
return self.knx.xknx.connection_manager.state is XknxConnectionState.CONNECTED
def after_update_callback(self, _: XknxConnectionState) -> None:
def after_update_callback(self, device: XknxConnectionState) -> None:
"""Call after device was updated."""
self.async_write_ha_state()

View File

@@ -3,23 +3,30 @@
"step": {
"connection_type": {
"title": "KNX connection",
"description": "Please enter the connection type we should use for your KNX connection. \n AUTOMATIC - The integration takes care of the connectivity to your KNX Bus by performing a gateway scan. \n TUNNELING - The integration will connect to your KNX bus via tunneling. \n ROUTING - The integration will connect to your KNX bus via routing.",
"description": "'Automatic' performs a gateway scan on start, to find a KNX IP interface. It will connect via a tunnel. (Not available if a gateway scan was not successful.) \n\n 'Tunneling' will connect to a specific KNX IP interface over a tunnel. \n\n 'Routing' will use Multicast to communicate with KNX IP routers.",
"data": {
"connection_type": "KNX Connection Type"
},
"data_description": {
"connection_type": "Please select the connection type you want to use for your KNX connection."
}
},
"tunnel": {
"title": "Tunnel",
"description": "Please select a gateway from the list.",
"data": {
"gateway": "KNX Tunnel Connection"
"gateway": "Please select a gateway from the list."
},
"data_description": {
"gateway": "Select a KNX tunneling interface you want use for the connection."
}
},
"tcp_tunnel_endpoint": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "Tunnel endpoint",
"data": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "Select the tunnel endpoint used for the connection."
},
"data_description": {
"tunnel_endpoint_ia": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
}
},
"manual_tunnel": {
@@ -27,23 +34,24 @@
"description": "Please enter the connection information of your tunneling device.",
"data": {
"tunneling_type": "KNX Tunneling Type",
"port": "[%key:common::config_flow::data::port%]",
"host": "[%key:common::config_flow::data::host%]",
"port": "[%key:common::config_flow::data::port%]",
"route_back": "Route back / NAT mode",
"local_ip": "Local IP interface"
},
"data_description": {
"port": "Port of the KNX/IP tunneling device.",
"tunneling_type": "Select the tunneling type of your KNX/IP tunneling device. Older interfaces may only support `UDP`.",
"host": "IP address or hostname of the KNX/IP tunneling device.",
"port": "Port used by the KNX/IP tunneling device.",
"route_back": "Enable if your KNXnet/IP tunneling server is behind NAT. Only applies for UDP connections.",
"local_ip": "Local IP or interface name used for the connection from Home Assistant. Leave blank to use auto-discovery."
}
},
"secure_key_source_menu_tunnel": {
"title": "KNX IP-Secure",
"description": "Select how you want to configure KNX/IP Secure.",
"description": "How do you want to configure KNX/IP Secure?",
"menu_options": {
"secure_knxkeys": "Use a `.knxkeys` file containing IP secure keys",
"secure_knxkeys": "Use a `.knxkeys` file providing IP secure keys",
"secure_tunnel_manual": "Configure IP secure credentials manually"
}
},
@@ -57,20 +65,23 @@
},
"secure_knxkeys": {
"title": "Import KNX Keyring",
"description": "Please select a `.knxkeys` file to import.",
"description": "The Keyring is used to encrypt and decrypt KNX IP Secure communication.",
"data": {
"knxkeys_file": "Keyring file",
"knxkeys_password": "The password to decrypt the `.knxkeys` file"
"knxkeys_password": "Keyring password"
},
"data_description": {
"knxkeys_password": "This was set when exporting the file from ETS."
"knxkeys_file": "Select a `.knxkeys` file. This can be exported from ETS.",
"knxkeys_password": "The password to open the `.knxkeys` file was set when exporting."
}
},
"knxkeys_tunnel_select": {
"title": "Tunnel endpoint",
"description": "Select the tunnel endpoint used for the connection.",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"user_id": "'Automatic' selects a free tunnel endpoint for you when connecting. If you're unsure, this is the best option."
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"secure_tunnel_manual": {
@@ -82,7 +93,7 @@
"device_authentication": "Device authentication password"
},
"data_description": {
"user_id": "This is often tunnel number +1. So 'Tunnel 2' would have User-ID '3'.",
"user_id": "This usually is tunnel number +1. So first tunnel in the list presented in ETS would have User-ID `2`.",
"user_password": "Password for the specific tunnel connection set in the 'Properties' panel of the tunnel in ETS.",
"device_authentication": "This is set in the 'IP' panel of the interface in ETS."
}
@@ -95,8 +106,8 @@
"sync_latency_tolerance": "Network latency tolerance"
},
"data_description": {
"backbone_key": "Can be seen in the 'Security' report of an ETS project. Eg. '00112233445566778899AABBCCDDEEFF'",
"sync_latency_tolerance": "Default is 1000."
"backbone_key": "Can be seen in the 'Security' report of your ETS project. Eg. `00112233445566778899AABBCCDDEEFF`",
"sync_latency_tolerance": "Should be equal to the backbone configuration of your ETS project. Default is `1000`"
}
},
"routing": {
@@ -104,13 +115,16 @@
"description": "Please configure the routing options.",
"data": {
"individual_address": "Individual address",
"routing_secure": "Use KNX IP Secure",
"routing_secure": "KNX IP Secure Routing",
"multicast_group": "Multicast group",
"multicast_port": "Multicast port",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
},
"data_description": {
"individual_address": "KNX address to be used by Home Assistant, e.g. `0.0.4`",
"routing_secure": "Select if your installation uses encrypted communication according to the KNX IP Secure standard. This setting requires compatible devices and configuration. You'll be prompted for credentials in the next step.",
"multicast_group": "Multicast group used by your installation. Default is `224.0.23.12`",
"multicast_port": "Multicast port used by your installation. Default is `3671`",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
}
}
@@ -148,7 +162,7 @@
},
"data_description": {
"state_updater": "Set default for reading states from the KNX Bus. When disabled, Home Assistant will not actively retrieve entity states from the KNX Bus. Can be overridden by `sync_state` entity options.",
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: 0 or 20 to 40",
"rate_limit": "Maximum outgoing telegrams per second.\n`0` to disable limit. Recommended: `0` or between `20` and `40`",
"telegram_log_size": "Telegrams to keep in memory for KNX panel group monitor. Maximum: {telegram_log_size_max}"
}
},
@@ -157,20 +171,27 @@
"description": "[%key:component::knx::config::step::connection_type::description%]",
"data": {
"connection_type": "[%key:component::knx::config::step::connection_type::data::connection_type%]"
},
"data_description": {
"connection_type": "[%key:component::knx::config::step::connection_type::data_description::connection_type%]"
}
},
"tunnel": {
"title": "[%key:component::knx::config::step::tunnel::title%]",
"description": "[%key:component::knx::config::step::tunnel::description%]",
"data": {
"gateway": "[%key:component::knx::config::step::tunnel::data::gateway%]"
},
"data_description": {
"gateway": "[%key:component::knx::config::step::tunnel::data_description::gateway%]"
}
},
"tcp_tunnel_endpoint": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"manual_tunnel": {
@@ -184,6 +205,7 @@
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data::local_ip%]"
},
"data_description": {
"tunneling_type": "[%key:component::knx::config::step::manual_tunnel::data_description::tunneling_type%]",
"port": "[%key:component::knx::config::step::manual_tunnel::data_description::port%]",
"host": "[%key:component::knx::config::step::manual_tunnel::data_description::host%]",
"route_back": "[%key:component::knx::config::step::manual_tunnel::data_description::route_back%]",
@@ -214,14 +236,17 @@
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
},
"data_description": {
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
}
},
"knxkeys_tunnel_select": {
"title": "[%key:component::knx::config::step::knxkeys_tunnel_select::title%]",
"description": "[%key:component::knx::config::step::knxkeys_tunnel_select::description%]",
"title": "[%key:component::knx::config::step::tcp_tunnel_endpoint::title%]",
"data": {
"user_id": "[%key:component::knx::config::step::knxkeys_tunnel_select::data::user_id%]"
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data::tunnel_endpoint_ia%]"
},
"data_description": {
"tunnel_endpoint_ia": "[%key:component::knx::config::step::tcp_tunnel_endpoint::data_description::tunnel_endpoint_ia%]"
}
},
"secure_tunnel_manual": {
@@ -262,6 +287,9 @@
},
"data_description": {
"individual_address": "[%key:component::knx::config::step::routing::data_description::individual_address%]",
"routing_secure": "[%key:component::knx::config::step::routing::data_description::routing_secure%]",
"multicast_group": "[%key:component::knx::config::step::routing::data_description::multicast_group%]",
"multicast_port": "[%key:component::knx::config::step::routing::data_description::multicast_port%]",
"local_ip": "[%key:component::knx::config::step::manual_tunnel::data_description::local_ip%]"
}
}

View File

@@ -37,5 +37,5 @@
"iot_class": "cloud_polling",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==1.4.2"]
"requirements": ["pylamarzocco==1.4.5"]
}

View File

@@ -230,8 +230,6 @@ def async_host_input_received(
)
identifiers = {(DOMAIN, generate_unique_id(config_entry.entry_id, address))}
device = device_registry.async_get_device(identifiers=identifiers)
if device is None:
return
if isinstance(inp, pypck.inputs.ModStatusAccessControl):
_async_fire_access_control_event(hass, device, address, inp)
@@ -240,7 +238,10 @@ def async_host_input_received(
def _async_fire_access_control_event(
hass: HomeAssistant, device: dr.DeviceEntry, address: AddressType, inp: InputType
hass: HomeAssistant,
device: dr.DeviceEntry | None,
address: AddressType,
inp: InputType,
) -> None:
"""Fire access control event (transponder, transmitter, fingerprint, codelock)."""
event_data = {
@@ -262,7 +263,10 @@ def _async_fire_access_control_event(
def _async_fire_send_keys_event(
hass: HomeAssistant, device: dr.DeviceEntry, address: AddressType, inp: InputType
hass: HomeAssistant,
device: dr.DeviceEntry | None,
address: AddressType,
inp: InputType,
) -> None:
"""Fire send_keys event."""
for table, action in enumerate(inp.actions):

View File

@@ -9,12 +9,7 @@ import voluptuous as vol
from yarl import URL
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_CLIENT_ID,
CONF_CLIENT_SECRET,
CONF_NAME,
)
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
@@ -22,7 +17,7 @@ from homeassistant.helpers.selector import (
)
from homeassistant.util import slugify
from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER
from .const import CONF_BASE_URL, DOMAIN, LOGGER
from .utils import construct_mastodon_username, create_mastodon_client
STEP_USER_DATA_SCHEMA = vol.Schema(
@@ -130,44 +125,3 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self.show_user_form(user_input, errors)
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import a config entry from configuration.yaml."""
errors: dict[str, str] | None = None
LOGGER.debug("Importing Mastodon from configuration.yaml")
base_url = base_url_from_url(str(import_data.get(CONF_BASE_URL, DEFAULT_URL)))
client_id = str(import_data.get(CONF_CLIENT_ID))
client_secret = str(import_data.get(CONF_CLIENT_SECRET))
access_token = str(import_data.get(CONF_ACCESS_TOKEN))
name = import_data.get(CONF_NAME)
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection,
base_url,
client_id,
client_secret,
access_token,
)
if not errors:
name = construct_mastodon_username(instance, account)
await self.async_set_unique_id(slugify(name))
self._abort_if_unique_id_configured()
if not name:
name = construct_mastodon_username(instance, account)
return self.async_create_entry(
title=name,
data={
CONF_BASE_URL: base_url,
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
CONF_ACCESS_TOKEN: access_token,
},
)
reason = next(iter(errors.items()))[1]
return self.async_abort(reason=reason)

View File

@@ -14,14 +14,12 @@ from homeassistant.components.notify import (
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER
from .const import CONF_BASE_URL, DEFAULT_URL, LOGGER
ATTR_MEDIA = "media"
ATTR_TARGET = "target"
@@ -46,51 +44,7 @@ async def async_get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> MastodonNotificationService | None:
"""Get the Mastodon notification service."""
if not discovery_info:
# Import config entry
import_result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
if (
import_result["type"] == FlowResultType.ABORT
and import_result["reason"] != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{import_result["reason"]}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key=f"deprecated_yaml_import_issue_{import_result["reason"]}",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
return None
ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
if discovery_info is None:
return None
client: Mastodon = discovery_info.get("client")

View File

@@ -25,20 +25,6 @@
"unknown": "Unknown error occured when connecting to the Mastodon instance."
}
},
"issues": {
"deprecated_yaml_import_issue_unauthorized_error": {
"title": "YAML import failed due to an authentication error",
"description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nPlease use the UI to configure Mastodon. Don't forget to delete the YAML configuration."
},
"deprecated_yaml_import_issue_network_error": {
"title": "YAML import failed because the instance was not found",
"description": "Configuring {integration_title} using YAML is being removed but no instance was found while importing your existing configuration.\nPlease use the UI to configure Mastodon. Don't forget to delete the YAML configuration."
},
"deprecated_yaml_import_issue_unknown": {
"title": "YAML import failed with unknown error",
"description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nPlease use the UI to configure Mastodon. Don't forget to delete the YAML configuration."
}
},
"entity": {
"sensor": {
"followers": {

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
from typing import TYPE_CHECKING, cast
from chip.clusters import Objects as clusters
from matter_server.client.models.device_types import BridgedDevice
from matter_server.client.models.device_types import BridgedNode
from matter_server.common.models import EventType, ServerInfoMessage
from homeassistant.config_entries import ConfigEntry
@@ -162,7 +162,7 @@ class MatterAdapter:
(
x
for x in endpoint.device_types
if x.device_type != BridgedDevice.device_type
if x.device_type != BridgedNode.device_type
),
None,
)

View File

@@ -40,9 +40,9 @@ from .util import (
)
COLOR_MODE_MAP = {
clusters.ColorControl.Enums.ColorMode.kCurrentHueAndCurrentSaturation: ColorMode.HS,
clusters.ColorControl.Enums.ColorMode.kCurrentXAndCurrentY: ColorMode.XY,
clusters.ColorControl.Enums.ColorMode.kColorTemperature: ColorMode.COLOR_TEMP,
clusters.ColorControl.Enums.ColorModeEnum.kCurrentHueAndCurrentSaturation: ColorMode.HS,
clusters.ColorControl.Enums.ColorModeEnum.kCurrentXAndCurrentY: ColorMode.XY,
clusters.ColorControl.Enums.ColorModeEnum.kColorTemperatureMireds: ColorMode.COLOR_TEMP,
}
# there's a bug in (at least) Espressif's implementation of light transitions
@@ -355,21 +355,21 @@ class MatterLight(MatterEntity, LightEntity):
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kHueSaturationSupported
& clusters.ColorControl.Bitmaps.ColorCapabilitiesBitmap.kHueSaturation
):
supported_color_modes.add(ColorMode.HS)
self._supports_color = True
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kXYAttributesSupported
& clusters.ColorControl.Bitmaps.ColorCapabilitiesBitmap.kXy
):
supported_color_modes.add(ColorMode.XY)
self._supports_color = True
if (
capabilities
& clusters.ColorControl.Bitmaps.ColorCapabilities.kColorTemperatureSupported
& clusters.ColorControl.Bitmaps.ColorCapabilitiesBitmap.kColorTemperature
):
supported_color_modes.add(ColorMode.COLOR_TEMP)
self._supports_color_temperature = True
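The renamed `ColorCapabilitiesBitmap` members are still tested as bit flags against the reported capabilities. A generic sketch of that kind of bitmask check using `enum.IntFlag`; the bit values below are illustrative, not the actual Matter SDK constants:

```python
"""Sketch: deriving supported color modes from a capabilities bitmask.

Assumption: the flag values are illustrative, not the Matter SDK constants.
"""
from enum import IntFlag


class ColorCapabilities(IntFlag):
    HUE_SATURATION = 0x01
    ENHANCED_HUE = 0x02
    COLOR_LOOP = 0x04
    XY = 0x08
    COLOR_TEMPERATURE = 0x10


capabilities = ColorCapabilities.HUE_SATURATION | ColorCapabilities.COLOR_TEMPERATURE

supported_modes = set()
if capabilities & ColorCapabilities.HUE_SATURATION:
    supported_modes.add("hs")
if capabilities & ColorCapabilities.XY:
    supported_modes.add("xy")
if capabilities & ColorCapabilities.COLOR_TEMPERATURE:
    supported_modes.add("color_temp")

print(sorted(supported_modes))  # ['color_temp', 'hs']
```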

View File

@@ -7,6 +7,6 @@
"dependencies": ["websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/matter",
"iot_class": "local_push",
"requirements": ["python-matter-server==6.6.0"],
"requirements": ["python-matter-server==7.0.0"],
"zeroconf": ["_matter._tcp.local.", "_matterc._udp.local."]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/mealie",
"integration_type": "service",
"iot_class": "local_polling",
"requirements": ["aiomealie==0.9.4"]
"requirements": ["aiomealie==0.9.5"]
}

View File

@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2024.12.13"],
"requirements": ["yt-dlp[default]==2024.12.23"],
"single_config_entry": true
}

View File

@@ -46,9 +46,13 @@ from homeassistant.const import (
CONF_TYPE,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
SERVICE_RELOAD,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import Event, HomeAssistant, ServiceCall
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import async_get_platforms
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
from .const import (
@@ -451,18 +455,29 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Modbus component."""
if DOMAIN not in config:
return True
async def _reload_config(call: Event | ServiceCall) -> None:
"""Reload Modbus."""
if DOMAIN not in hass.data:
_LOGGER.error("Modbus cannot reload, because it was never loaded")
return
hubs = hass.data[DOMAIN]
for name in hubs:
await hubs[name].async_close()
reset_platforms = async_get_platforms(hass, DOMAIN)
for reset_platform in reset_platforms:
_LOGGER.debug("Reload modbus resetting platform: %s", reset_platform.domain)
await reset_platform.async_reset()
reload_config = await async_integration_yaml_config(hass, DOMAIN)
if not reload_config:
_LOGGER.debug("Modbus not present anymore")
return
_LOGGER.debug("Modbus reloading")
await async_modbus_setup(hass, reload_config)
async_register_admin_service(hass, DOMAIN, SERVICE_RELOAD, _reload_config)
return await async_modbus_setup(
hass,
config,
)
async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
"""Release modbus resources."""
if DOMAIN not in hass.data:
_LOGGER.error("Modbus cannot reload, because it was never loaded")
return
_LOGGER.debug("Modbus reloading")
hubs = hass.data[DOMAIN]
for name in hubs:
await hubs[name].async_close()

View File

@@ -34,7 +34,6 @@ import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType
from .const import (
@@ -125,8 +124,6 @@ async def async_modbus_setup(
) -> bool:
"""Set up Modbus component."""
await async_setup_reload_service(hass, DOMAIN, [DOMAIN])
if config[DOMAIN]:
config[DOMAIN] = check_config(hass, config[DOMAIN])
if not config[DOMAIN]:

View File

@@ -6,14 +6,14 @@ import asyncio
from collections.abc import Callable
from datetime import datetime
import logging
from typing import TYPE_CHECKING, Any, cast
from typing import Any, cast
import voluptuous as vol
from homeassistant import config as conf_util
from homeassistant.components import websocket_api
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DISCOVERY, CONF_PAYLOAD, SERVICE_RELOAD
from homeassistant.const import CONF_DISCOVERY, SERVICE_RELOAD
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import (
ConfigValidationError,
@@ -25,7 +25,6 @@ from homeassistant.helpers import (
entity_registry as er,
event as ev,
issue_registry as ir,
template,
)
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -113,8 +112,6 @@ _LOGGER = logging.getLogger(__name__)
SERVICE_PUBLISH = "publish"
SERVICE_DUMP = "dump"
ATTR_TOPIC_TEMPLATE = "topic_template"
ATTR_PAYLOAD_TEMPLATE = "payload_template"
ATTR_EVALUATE_PAYLOAD = "evaluate_payload"
MAX_RECONNECT_WAIT = 300 # seconds
@@ -155,25 +152,16 @@ CONFIG_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)
# The use of a topic_template and payload_template in an mqtt publish action call
# have been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0
# Publish action call validation schema
MQTT_PUBLISH_SCHEMA = vol.All(
vol.Schema(
{
vol.Exclusive(ATTR_TOPIC, CONF_TOPIC): valid_publish_topic,
vol.Exclusive(ATTR_TOPIC_TEMPLATE, CONF_TOPIC): cv.string,
vol.Exclusive(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string,
vol.Exclusive(ATTR_PAYLOAD_TEMPLATE, CONF_PAYLOAD): cv.string,
vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
),
cv.has_at_least_one_key(ATTR_TOPIC, ATTR_TOPIC_TEMPLATE),
MQTT_PUBLISH_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TOPIC): valid_publish_topic,
vol.Required(ATTR_PAYLOAD): cv.string,
vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean,
vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema,
vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean,
},
required=True,
)
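With the template options removed, `topic` and `payload` become plain required fields. A sketch showing what the tightened schema accepts and rejects; `valid_publish_topic` and `valid_qos_schema` are Home Assistant internals, so simple stand-in validators are used here:

```python
"""Sketch: the effect of the tightened MQTT publish schema.

Assumption: valid_publish_topic and valid_qos_schema are replaced by simple
stand-in validators.
"""
import voluptuous as vol

PUBLISH_SCHEMA = vol.Schema(
    {
        vol.Required("topic"): str,
        vol.Required("payload"): str,
        vol.Optional("evaluate_payload"): bool,
        vol.Optional("qos", default=0): vol.In([0, 1, 2]),
        vol.Optional("retain", default=False): bool,
    },
    required=True,
)

# Plain topic and payload pass; defaults for qos and retain are filled in.
print(PUBLISH_SCHEMA({"topic": "home/test", "payload": "on"}))

# The removed template options are now rejected outright.
try:
    PUBLISH_SCHEMA({"topic_template": "home/{{ 1 + 1 }}", "payload": "on"})
except vol.Invalid as err:
    print("rejected:", err)
```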
@@ -233,86 +221,25 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_publish_service(call: ServiceCall) -> None:
"""Handle MQTT publish service calls."""
msg_topic: str | None = call.data.get(ATTR_TOPIC)
msg_topic_template: str | None = call.data.get(ATTR_TOPIC_TEMPLATE)
msg_topic: str = call.data[ATTR_TOPIC]
if not mqtt_config_entry_enabled(hass):
raise ServiceValidationError(
translation_key="mqtt_not_setup_cannot_publish",
translation_domain=DOMAIN,
translation_placeholders={
"topic": str(msg_topic or msg_topic_template)
},
translation_placeholders={"topic": msg_topic},
)
mqtt_data = hass.data[DATA_MQTT]
payload: PublishPayloadType = call.data.get(ATTR_PAYLOAD)
payload: PublishPayloadType = call.data[ATTR_PAYLOAD]
evaluate_payload: bool = call.data.get(ATTR_EVALUATE_PAYLOAD, False)
payload_template: str | None = call.data.get(ATTR_PAYLOAD_TEMPLATE)
qos: int = call.data[ATTR_QOS]
retain: bool = call.data[ATTR_RETAIN]
if msg_topic_template is not None:
# The use of a topic_template in an mqtt publish action call
# has been deprecated with HA Core 2024.8.0
# and will be removed with HA Core 2025.2.0
rendered_topic: Any = MqttCommandTemplate(
template.Template(msg_topic_template, hass),
).async_render()
ir.async_create_issue(
hass,
DOMAIN,
f"topic_template_deprecation_{rendered_topic}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="topic_template_deprecation",
translation_placeholders={
"topic_template": msg_topic_template,
"topic": rendered_topic,
},
)
try:
msg_topic = valid_publish_topic(rendered_topic)
except vol.Invalid as err:
err_str = str(err)
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_publish_topic",
translation_placeholders={
"error": err_str,
"topic": str(rendered_topic),
"topic_template": str(msg_topic_template),
},
) from err
if payload_template is not None:
# The use of a payload_template in an mqtt publish action call
# has been deprecated with HA Core 2024.8.0
# and will be removed with HA Core 2025.2.0
if TYPE_CHECKING:
assert msg_topic is not None
ir.async_create_issue(
hass,
DOMAIN,
f"payload_template_deprecation_{msg_topic}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="payload_template_deprecation",
translation_placeholders={
"topic": msg_topic,
"payload_template": payload_template,
},
)
payload = MqttCommandTemplate(
template.Template(payload_template, hass)
).async_render()
elif evaluate_payload:
if evaluate_payload:
# Convert quoted binary literal to raw data
payload = convert_outgoing_mqtt_payload(payload)
if TYPE_CHECKING:
assert msg_topic is not None
await mqtt_data.client.async_publish(msg_topic, payload, qos, retain)
hass.services.async_register(

View File

@@ -11,14 +11,6 @@
"invalid_platform_config": {
"title": "Invalid config found for mqtt {domain} item",
"description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue."
},
"payload_template_deprecation": {
"title": "Deprecated option used in mqtt publish action call",
"description": "Deprecated `payload_template` option used in MQTT publish action call to topic `{topic}` from payload template `{payload_template}`. Use the `payload` option instead. In automations templates are supported natively. Update the automation or script to use the `payload` option instead and restart Home Assistant to fix this issue."
},
"topic_template_deprecation": {
"title": "Deprecated option used in mqtt publish action call",
"description": "Deprecated `topic_template` option used in MQTT publish action call to topic `{topic}` from topic template `{topic_template}`. Use the `topic` option instead. In automations templates are supported natively. Update the automation or script to use the `topic` option instead and restart Home Assistant to fix this issue."
}
},
"config": {

View File

@@ -27,7 +27,6 @@ from homeassistant.auth.permissions.const import POLICY_READ
from homeassistant.components.camera import Image, img_util
from homeassistant.components.http import KEY_HASS_USER
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_CLIENT_ID,
@@ -55,6 +54,7 @@ from homeassistant.helpers.typing import ConfigType
from . import api
from .const import (
CONF_CLOUD_PROJECT_ID,
CONF_PROJECT_ID,
CONF_SUBSCRIBER_ID,
CONF_SUBSCRIBER_ID_IMPORTED,
@@ -214,33 +214,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
update_callback = SignalUpdateCallback(hass, async_config_reload, entry)
subscriber.set_update_callback(update_callback.async_handle_event)
try:
await subscriber.start_async()
unsub = await subscriber.start_async()
except AuthException as err:
raise ConfigEntryAuthFailed(
f"Subscriber authentication error: {err!s}"
) from err
except ConfigurationException as err:
_LOGGER.error("Configuration error: %s", err)
subscriber.stop_async()
return False
except SubscriberException as err:
subscriber.stop_async()
raise ConfigEntryNotReady(f"Subscriber error: {err!s}") from err
try:
device_manager = await subscriber.async_get_device_manager()
except ApiException as err:
subscriber.stop_async()
unsub()
raise ConfigEntryNotReady(f"Device manager error: {err!s}") from err
@callback
def on_hass_stop(_: Event) -> None:
"""Close connection when hass stops."""
subscriber.stop_async()
unsub()
entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop)
)
entry.async_on_unload(unsub)
entry.runtime_data = NestData(
subscriber=subscriber,
device_manager=device_manager,
@@ -251,18 +251,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool:
"""Unload a config entry."""
if DATA_SDM not in entry.data:
# Legacy API
return True
_LOGGER.debug("Stopping nest subscriber")
subscriber = entry.runtime_data.subscriber
subscriber.stop_async()
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def async_remove_entry(hass: HomeAssistant, entry: NestConfigEntry) -> None:
"""Handle removal of pubsub subscriptions created during config flow."""
if (
DATA_SDM not in entry.data
@@ -272,24 +266,25 @@ async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
or CONF_SUBSCRIBER_ID_IMPORTED in entry.data
):
return
subscriber = await api.new_subscriber(hass, entry)
if not subscriber:
return
_LOGGER.debug("Deleting subscriber '%s'", subscriber.subscriber_id)
if (subscription_name := entry.data.get(CONF_SUBSCRIPTION_NAME)) is None:
subscription_name = entry.data[CONF_SUBSCRIBER_ID]
admin_client = api.new_pubsub_admin_client(
hass,
access_token=entry.data["token"]["access_token"],
cloud_project_id=entry.data[CONF_CLOUD_PROJECT_ID],
)
_LOGGER.debug("Deleting subscription '%s'", subscription_name)
try:
await subscriber.delete_subscription()
except (AuthException, SubscriberException) as err:
await admin_client.delete_subscription(subscription_name)
except ApiException as err:
_LOGGER.warning(
(
"Unable to delete subscription '%s'; Will be automatically cleaned up"
" by cloud console: %s"
),
subscriber.subscriber_id,
subscription_name,
err,
)
finally:
subscriber.stop_async()
class NestEventViewBase(HomeAssistantView, ABC):
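
The hunk above moves from an explicit `subscriber.stop_async()` to the unsubscribe callable returned by `start_async()` in google-nest-sdm 7.0.0. A hedged sketch of the resulting lifecycle (names mirror the diff, error handling trimmed):

# Assumes `subscriber`, `entry` and `ApiException` as imported in this module.
unsub = await subscriber.start_async()
try:
    device_manager = await subscriber.async_get_device_manager()
except ApiException:
    unsub()  # tear down the Pub/Sub subscription if setup cannot finish
    raise
entry.async_on_unload(unsub)  # normal teardown on unload or Home Assistant stop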


@@ -12,7 +12,6 @@ from google_nest_sdm.admin_client import PUBSUB_API_HOST, AdminClient
from google_nest_sdm.auth import AbstractAuth
from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
@@ -24,6 +23,7 @@ from .const import (
OAUTH2_TOKEN,
SDM_SCOPES,
)
from .types import NestConfigEntry
_LOGGER = logging.getLogger(__name__)
@@ -102,7 +102,7 @@ class AccessTokenAuthImpl(AbstractAuth):
async def new_subscriber(
hass: HomeAssistant, entry: ConfigEntry
hass: HomeAssistant, entry: NestConfigEntry
) -> GoogleNestSubscriber | None:
"""Create a GoogleNestSubscriber."""
implementation = (
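
The `ConfigEntry` to `NestConfigEntry` signature changes in this file and in `__init__.py` rely on Home Assistant's typed config entries. The `.types` module itself is not shown in this diff; a hedged sketch of what it presumably declares, so that `entry.runtime_data` is typed as `NestData`:

from dataclasses import dataclass

from google_nest_sdm.device_manager import DeviceManager  # module path assumed
from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber

from homeassistant.config_entries import ConfigEntry


@dataclass
class NestData:
    """Runtime data attached to the Nest config entry (fields assumed)."""

    subscriber: GoogleNestSubscriber
    device_manager: DeviceManager


type NestConfigEntry = ConfigEntry[NestData]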


@@ -19,5 +19,5 @@
"documentation": "https://www.home-assistant.io/integrations/nest",
"iot_class": "cloud_push",
"loggers": ["google_nest_sdm"],
"requirements": ["google-nest-sdm==6.1.5"]
"requirements": ["google-nest-sdm==7.0.0"]
}


@@ -19,8 +19,8 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"not_ipv4_address": "No IPv4 address in ssdp discovery information",
"no_serial": "No serial number in ssdp discovery information"
"not_ipv4_address": "No IPv4 address in SSDP discovery information",
"no_serial": "No serial number in SSDP discovery information"
}
},
"options": {
@@ -48,7 +48,7 @@
"name": "SSID"
},
"access_point_mac": {
"name": "Access point mac"
"name": "Access point MAC"
},
"upload_today": {
"name": "Upload today"


@@ -5,11 +5,11 @@ from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from .const import DOMAIN, PLATFORMS
from .const import CONF_AREAS, DOMAIN, LOGGER, PLATFORMS
from .coordinator import NordPoolDataUpdateCoordinator
from .services import async_setup_services
@@ -25,10 +25,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
"""Set up Nord Pool from a config entry."""
coordinator = NordPoolDataUpdateCoordinator(hass, entry)
await cleanup_device(hass, config_entry)
coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
await coordinator.fetch_data(dt_util.utcnow())
if not coordinator.last_update_success:
raise ConfigEntryNotReady(
@@ -36,13 +40,33 @@ async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) ->
translation_key="initial_update_failed",
translation_placeholders={"error": str(coordinator.last_exception)},
)
entry.runtime_data = coordinator
config_entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> bool:
"""Unload Nord Pool config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
async def cleanup_device(
hass: HomeAssistant, config_entry: NordPoolConfigEntry
) -> None:
"""Cleanup device and entities."""
device_reg = dr.async_get(hass)
entries = dr.async_entries_for_config_entry(device_reg, config_entry.entry_id)
for area in config_entry.data[CONF_AREAS]:
for entry in entries:
if entry.identifiers == {(DOMAIN, area)}:
continue
LOGGER.debug("Removing device %s", entry.name)
device_reg.async_update_device(
entry.id, remove_config_entry_id=config_entry.entry_id
)
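
`cleanup_device` keeps only devices whose identifiers match a configured price area and detaches the rest from the config entry. A hedged sketch of the other side of that contract, showing how a per-area device is typically declared so the `{(DOMAIN, area)}` comparison above has something to match (attribute values are illustrative, not taken from this diff):

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo

# `DOMAIN` and `area` as used in the hunk above; name and entry_type are made up.
device_info = DeviceInfo(
    identifiers={(DOMAIN, area)},
    name=f"Nord Pool {area}",
    entry_type=DeviceEntryType.SERVICE,
)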
