Compare commits

...

131 Commits

Author SHA1 Message Date
abmantis
72a524c868 Merge branch 'dev' of github.com:home-assistant/core into dev_target_triggers_conditions 2025-09-29 16:56:23 +01:00
Erik Montnemery
5975cd6e09 Revert "Add mg/m³ as a valid UOM for sensor/number Carbon Monoxide device class" (#153196) 2025-09-29 15:43:13 +01:00
RogerSelwyn
258c9ff52b Handle return result from ebusd being "empty" (#153199) 2025-09-29 16:08:42 +02:00
starkillerOG
89c5d498a4 Add Reolink Ai person type, vehicle type and animal type (#153170) 2025-09-29 15:39:29 +02:00
Artur Pragacz
76cb4d123a Filter out empty integration type in extended analytics (#153188) 2025-09-29 15:18:15 +02:00
Erik Montnemery
f0c29c7699 Revert "Add comment on conversion factor for Carbon monoxide on dependency molecular weight" (#153195) 2025-09-29 14:56:42 +02:00
Kyle Worrall
aa4151ced7 Fix for Hue Integration motion aware areas (#153079)
Co-authored-by: Marcel van der Veldt <m.vanderveldt@outlook.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 14:50:36 +02:00
G Johansson
0a6fa978fa Add timeout to dnsip (to handle stale connections) (#153086) 2025-09-29 14:49:38 +02:00
Simone Chemelli
dc02002b9d Bump aioamazondevices to 6.2.7 (#153185) 2025-09-29 14:30:42 +02:00
cdnninja
f071a3f38b Correct vesync water tank lifted key (#153173) 2025-09-29 14:29:25 +02:00
dependabot[bot]
b935231e47 Bump actions/dependency-review-action from 4.7.3 to 4.8.0 (#153180)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:17:20 +02:00
dependabot[bot]
b9f7613567 Bump github/codeql-action from 3.30.4 to 3.30.5 (#153179)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-29 13:15:53 +02:00
abmantis
b437113f31 Merge branch 'dev' of github.com:home-assistant/core into dev_target_triggers_conditions 2025-09-29 11:18:39 +01:00
Maciej Bieniek
1289a031ab Add consumed energy sensor for Shelly pm1 and switch components (#153053) 2025-09-29 13:06:07 +03:00
Andrew Jackson
289546ef6d Bump aiomealie to 0.11.0 adding times to recipes (#153183) 2025-09-29 11:58:40 +02:00
Guido Schmitz
aacff4db5d Rework devolo Home Control config flow tests (#147083)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-29 09:47:07 +02:00
starkillerOG
f833b56122 Add Reolink siren state (#153169) 2025-09-29 08:42:38 +02:00
Tom Matheussen
7eb0f2993f Fix entities not being created when adding subentries for Satel Integra (#153139) 2025-09-28 21:37:35 -04:00
Michael
abb341abfe Add newly added cpu temperatures to diagnostics in FRITZ!Tools (#153168) 2025-09-28 22:40:10 +02:00
starkillerOG
0d90614369 Bump reolink-aio to 0.16.0 (#153161) 2025-09-28 21:55:39 +02:00
starkillerOG
ec84bebeea Add Reolink AI bicycle detection entity (#153163) 2025-09-28 21:54:59 +02:00
Shay Levy
9176867d6b Add Shelly EV charger sensors (#152722) 2025-09-28 22:45:11 +03:00
Allen Porter
281a137ff5 Add missing translations for Model Context Protocol integration (#153147) 2025-09-28 20:05:15 +02:00
tronikos
d6543480ac Refactor SQL integration (#153135) 2025-09-28 19:03:13 +02:00
Luca Graf
ae6391b866 Ignore gateway device in ViCare integration (#153097) 2025-09-28 16:04:22 +02:00
Joakim Plate
10b56e4258 Ensure togrill detects disconnected devices (#153067) 2025-09-28 15:34:56 +02:00
Erwin Douna
0ff2597957 Portainer add re-auth flow (#153077) 2025-09-28 15:31:50 +02:00
Artur Pragacz
026b28e962 Improve interview logging in Onkyo (#153095) 2025-09-28 15:26:40 +02:00
peteS-UK
9a1e67294a Extend timeout test in test_config_flow for Squeezebox to completion (#153080) 2025-09-28 15:20:47 +02:00
G Johansson
cdb448a5cc Use automatic reload options flow in random (#153103) 2025-09-28 01:02:33 +01:00
G Johansson
ab80e726e2 Use automatic reload options flow in filter (#153104) 2025-09-28 01:02:14 +01:00
G Johansson
2d5d0f67b2 Use automatic reload options flow in history_stats (#153115) 2025-09-28 01:01:33 +01:00
G Johansson
d4100b6096 Use automatic reload options flow in mold_indicator (#153106) 2025-09-28 01:00:48 +01:00
G Johansson
955e854d77 Use automatic reload options flow in utility_meter (#153111) 2025-09-28 01:00:07 +01:00
G Johansson
0c37f88c49 Use automatic reload options flow in derivative (#153112) 2025-09-28 00:59:07 +01:00
G Johansson
48167eeb9c Use automatic reload options flow in worldclock (#153105) 2025-09-28 00:58:20 +01:00
G Johansson
24177197f7 Use automatic reload options flow in generic_thermostat (#153108) 2025-09-28 00:57:12 +01:00
G Johansson
863fc0ba97 Use automatic reload options flow in switch_as_x (#153109) 2025-09-28 00:52:26 +01:00
G Johansson
9f7b229d02 Use automatic reload options flow in template (#153110) 2025-09-28 00:50:00 +01:00
G Johansson
ffd909f3d9 Use automatic reload options flow in group (#153116) 2025-09-28 00:48:44 +01:00
Tom
1ebf096a33 Add reauthentication flow to airOS (#153076)
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
2025-09-27 23:28:14 +02:00
Robert Resch
96d51965e5 Bump deebot-client to 15.0.0 (#153125) 2025-09-27 23:24:39 +02:00
G Johansson
04b510b020 Fix event range in workday calendar (#153128) 2025-09-27 23:22:39 +02:00
G Johansson
c9a301d50e Use automatic reload options flow in systemmonitor (#153107) 2025-09-27 20:50:14 +02:00
G Johansson
b304bd1a8b Use automatic reload options flow in local_file (#153114) 2025-09-27 20:49:39 +02:00
G Johansson
b99525b231 Use automatic reload options flow in tod (#153113) 2025-09-27 20:45:40 +02:00
G Johansson
634db13990 Use automatic reload options flow in trend (#153117) 2025-09-27 20:44:53 +02:00
peteS-UK
ad51a77989 Extend squeezebox config_flow test to completion (#153000)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2025-09-27 20:36:38 +02:00
G Johansson
3348a39e8a Use automatic reload options flow in generic_hygrostat (#153102) 2025-09-27 20:33:57 +02:00
Christian McHugh
81c2e356ec Fix: Set EPH climate heating as on only when boiler is actively heating (#152914) 2025-09-27 20:19:57 +02:00
Jan Bouwhuis
de6c3512d2 Add IMAP fetch message part feature (#152845) 2025-09-27 14:49:26 +02:00
G Johansson
36dc1e938a Fix can exclude optional holidays in workday (#153082) 2025-09-27 14:40:29 +02:00
Sören Beye
07a78cf6f7 Squeezebox: Proxy all the thumbnails (#147199)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-27 14:39:15 +02:00
Erwin Douna
eaa673e0c3 Portainer switch terminology to API token (#152958)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-27 14:32:25 +02:00
Simone Chemelli
f2c4ca081f Remove redundant code for Alexa Devices (#153083) 2025-09-27 13:05:07 +02:00
Thomas D
e3d707f0b4 Prevent duplicate entities for Volvo integration (#151779) 2025-09-27 12:29:11 +02:00
Tom
fb93fed2e5 Bump airOS dependency (#153065) 2025-09-27 01:20:51 +02:00
Björn Dalfors
95dfc2f23d Bump nibe dependency to 2.19.0 (#153062) 2025-09-26 23:49:40 +01:00
Franck Nijhof
408df2093a Update Home Assistant base image to 2025.09.3 (#153064) 2025-09-26 23:28:43 +02:00
Eskander Bejaoui
f32bf0cc3e nmap_tracker: Optimize default scan options (#153047) 2025-09-26 22:31:49 +02:00
peteS-UK
dbbe3145b6 Replace patch of entity_registry in test_config_flow for Squeezebox (#153039) 2025-09-26 22:17:47 +02:00
Erik Montnemery
f8bf3ea2ef Correct filter of target selector in motioneye services (#152971) 2025-09-26 22:08:19 +02:00
Bouwe Westerdijk
053bd31d43 Snapshot testing for Plugwise Switch platform (#153030) 2025-09-26 22:07:42 +02:00
DeerMaximum
1aefc3f37a NINA Use better wording for filters (#153050) 2025-09-26 22:05:10 +02:00
Joris Pelgröm
3de955d9ce Use UnitOfTime.DAYS instead of custom unit for LetPot number entity (#153054) 2025-09-26 21:58:17 +02:00
SapuSeven
0ff88fd366 Add None-check for VeSync fan device.state.display_status (#153055) 2025-09-26 21:57:01 +02:00
peteS-UK
eb84020773 Replace platform setup functions with fixtures with autouse in Squeezebox tests (#153057) 2025-09-26 21:49:58 +02:00
Tom
4bbfea3c7c Add SSL options during config_flow for airOS (#150325)
Co-authored-by: Åke Strandberg <ake@strandberg.eu>
Co-authored-by: G Johansson <goran.johansson@shiftit.se>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-26 21:38:27 +02:00
Josef Zweck
63d4fb7558 Ensure token validity in lamarzocco (#153058) 2025-09-26 21:36:03 +02:00
Artur Pragacz
953895cd81 Use satellite entity area in the assist pipeline (#153017) 2025-09-26 21:34:45 +02:00
Erwin Douna
a6c3f4efc0 Portainer add ability to skip SSL verification (#152955) 2025-09-26 21:32:49 +02:00
Paul Bottein
11e880d034 Update frontend to 20250926.0 (#153049) 2025-09-26 21:31:47 +02:00
Martin Hjelmare
e4d6bdb398 Fix Thread flow abort on multiple flows (#153048) 2025-09-26 18:48:51 +01:00
Andrew Jackson
6ced1783e3 Add discovery to Mealie (#151773)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-09-26 18:48:19 +02:00
Paulus Schoutsen
8051f78d10 Push ESPHome discovery to ZJS addon (#153004) 2025-09-26 10:12:56 -04:00
Josef Zweck
b724176b23 Bump pylamarzocco to 2.1.1 (#153027) 2025-09-26 15:46:24 +02:00
Erik Montnemery
fdca16ea92 Fix typing in ObjectSelectorConfig (#153043) 2025-09-26 15:18:18 +02:00
Stefan Agner
f8fd8b432a Update Home Assistant base image to 2025.09.2 (#153035) 2025-09-26 13:03:39 +02:00
lliwog
9148ae70ce Fix EZVIZ devices merging due to empty MAC addr (#152939) (#152981)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:47:11 +02:00
RogerSelwyn
447cb26d28 Protect against last_comms being None (#149366)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-09-26 12:35:04 +02:00
J. Nick Koston
2af36465f6 Bump aioesphomeapi to 41.11.0 (#153014) 2025-09-26 12:31:59 +02:00
dependabot[bot]
d5f7265424 Bump github/codeql-action from 3.30.3 to 3.30.4 (#153015)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-26 12:31:15 +02:00
Simone Chemelli
cc16af7f2d Code optimization for Uptime Robot (#153031) 2025-09-26 12:29:02 +02:00
Retha Runolfsson
7a4d75bc44 Add garage door opener for switchbot integration (#148460) 2025-09-26 12:11:59 +02:00
Bouwe Westerdijk
ec0380fd3b Snapshot testing for Plugwise Sensor platform (#153021) 2025-09-26 11:22:14 +02:00
Stefan Agner
b17cc71dfb Bump to home-assistant/wheels@2025.09.1 (#153025) 2025-09-26 11:04:02 +02:00
Erik Montnemery
89b327ed7b Remove device filter from target selector in bang_olufsen services (#152957) 2025-09-26 09:02:14 +02:00
Simone Chemelli
9bf361a1b8 Fix PIN failure if starting with 0 for Comelit SimpleHome (#152983) 2025-09-26 08:59:03 +02:00
J. Diego Rodríguez Royo
d11c171c75 Bump aiohomeconnect to version 0.20.0 (#153003) 2025-09-26 07:49:38 +02:00
puddly
c523c45d17 Allow ZHA discovery if discovery unique_id conflicts with config entry (#153009)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-26 07:39:00 +02:00
puddly
c1b9c0e1b6 Ignore discovery for existing ZHA entries (#152984) 2025-09-26 07:17:01 +02:00
puddly
487b9ff03e Bump ZHA to 0.0.73 (#153007) 2025-09-25 23:44:25 -04:00
Simone Chemelli
ec62b0cdfb Code optimization for Uptime Robot (#152993) 2025-09-26 00:34:09 +01:00
Brandon Harvey
6d0470064f Rename service to action in ESPHome (#152997) 2025-09-25 14:54:06 -05:00
Simone Chemelli
7450b3fd1a Improve tests for Alexa Devices (#152995) 2025-09-25 21:39:44 +02:00
Noah Husby
5b70910d77 Bump aiorussound to 4.8.2 (#152988) 2025-09-25 20:34:29 +02:00
Abílio Costa
52de5ff5ff Remove deprecated zone and event condition keys (#152986) 2025-09-25 19:23:40 +02:00
J. Nick Koston
c4389a1679 Bump aioesphomeapi to 41.10.0 (#152975)
Co-authored-by: Michael Hansen <mike@rhasspy.org>
2025-09-25 19:21:17 +02:00
Norbert Rittel
35faaa6cae Add missing square brackets to references in fully_kiosk actions (#152987) 2025-09-25 19:19:27 +02:00
Paul Bottein
3c0b13975a Update frontend to 20250925.1 (#152985) 2025-09-25 19:05:12 +02:00
Simone Chemelli
bc88696339 Remove deprecated sensors and update remaning for Alexa Devices (#151230) 2025-09-25 18:59:53 +02:00
Erik Montnemery
8f99c3f64a Remove device filter from target selector in lyric services (#152970) 2025-09-25 18:45:32 +02:00
Erik Montnemery
88016d96d4 Remove device and entity filter from target selector in homeassistant services (#152969) 2025-09-25 17:41:54 +01:00
Erik Montnemery
47df73b18f Remove device filter from target selector in google_mail services (#152968) 2025-09-25 18:32:12 +02:00
Maciej Bieniek
1c12d2b8cd Bump accuweather to version 4.2.2 (#152965) 2025-09-25 18:30:47 +02:00
Erik Montnemery
eb38837a8c Replace target selector with device selector in fully_kiosk services (#152959)
Co-authored-by: Franck Nijhof <git@frenck.dev>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2025-09-25 18:30:05 +02:00
Erik Montnemery
159c7fbfd1 Correct filter of target selector in sonos services (#152972) 2025-09-25 18:29:26 +02:00
Joost Lekkerkerker
7ee31f0884 Bump pySmartThings to 3.3.0 (#152977) 2025-09-25 17:57:30 +02:00
Daniel Potthast
0c5e12571a Update mvglive component (#146479)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2025-09-25 17:20:43 +02:00
Luke Lashley
9db973217f Fix incorrect Roborock test (#152980) 2025-09-25 17:18:24 +02:00
Artur Pragacz
cf1a745283 Move condition-specific fields into options (#152635) 2025-09-25 15:55:50 +02:00
peteS-UK
834e3f1963 Add HassKey for hass.data in Squeezebox (#149129) 2025-09-25 14:05:40 +02:00
Joakim Sørensen
3f8f7573c9 Bump hass-nabucasa from 1.1.1 to 1.1.2 (#152950) 2025-09-25 11:34:14 +01:00
Karsten Bade
0ae272f1f6 Add return types and docstring to sonos component (#152946) 2025-09-25 11:34:38 +02:00
Paul Bottein
8774295e2e Update frontend to 20250925.0 (#152945) 2025-09-25 11:33:01 +02:00
Erwin Douna
0c8d2594ef Portainer fix unique entity (#152941)
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-09-25 09:49:22 +02:00
Simone Chemelli
205bd2676b Update IQS to platinum for Alexa Devices (#152905) 2025-09-25 09:45:50 +02:00
dependabot[bot]
25849fd9cc Bump actions/cache from 4.2.4 to 4.3.0 (#152934)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-25 09:43:03 +02:00
Sab44
7d6eac9ff7 Bump librehardwaremonitor-api to version 1.4.0 (#152938) 2025-09-25 09:42:31 +02:00
Luke Lashley
31017ebc98 Fix logical error when user has no Roborock maps (#152752) 2025-09-25 09:39:52 +02:00
Jimmy Zhening Luo
724a7b0ecc Quality: mark installation param doc as done (#152909) 2025-09-25 09:06:13 +02:00
Paulus Schoutsen
91e13d447a Prevent common control calling async methods from thread (#152931)
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-09-24 23:09:54 -04:00
J. Nick Koston
7c8ad9d535 Fix ESPHome reauth not being triggered on incorrect password (#152911) 2025-09-24 22:27:40 -04:00
Franck Nijhof
9cd3ab853d Add block Spook < 4.0.0 as breaking Home Assistant (#152930) 2025-09-24 22:18:06 -04:00
Paulus Schoutsen
0b0f8c5829 Remove some more domains from common controls (#152927) 2025-09-24 22:15:29 -04:00
J. Nick Koston
ae7bc7fb1b Bump aioesphomeapi to 41.9.4 (#152923) 2025-09-24 19:16:48 -05:00
Franck Nijhof
09750872b5 Bump version to 2025.11.0dev0 (#152915) 2025-09-24 23:55:32 +02:00
Franck Nijhof
076e51017b Bump to home-assistant/wheels@2025.09.0 (#152920) 2025-09-24 23:12:20 +02:00
Simone Chemelli
95e7b00996 Update IQS to platinum for Comelit SimpleHome (#152906) 2025-09-24 22:03:31 +01:00
J. Nick Koston
ddecf1ac21 Bump aioesphomeapi to 41.9.3 to fix segfault (#152912) 2025-09-24 22:00:45 +01:00
Abílio Costa
e0e263d3b5 Add state trigger to light component (#148416)
Co-authored-by: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com>
2025-09-18 19:53:26 +01:00
271 changed files with 17812 additions and 2091 deletions

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 8
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.10"
HA_SHORT_VERSION: "2025.11"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13']"
# 10.3 is the oldest supported version
@@ -263,7 +263,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -279,7 +279,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -309,7 +309,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -318,7 +318,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -349,7 +349,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -358,7 +358,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -389,7 +389,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -398,7 +398,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -505,7 +505,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
key: >-
@@ -513,7 +513,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -525,7 +525,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -570,7 +570,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -622,7 +622,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -651,7 +651,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -684,7 +684,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -711,7 +711,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Dependency review
uses: actions/dependency-review-action@595b5aeba73380359d98a5e087f648dbb0edce1b # v4.7.3
uses: actions/dependency-review-action@56339e523c0409420f6c2c9a2f4292bbb3c07dd3 # v4.8.0
with:
license-check: false # We use our own license audit checks
@@ -741,7 +741,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -784,7 +784,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -831,7 +831,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -883,7 +883,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -891,7 +891,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: .mypy_cache
key: >-
@@ -935,7 +935,7 @@ jobs:
name: Split tests for full run
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -967,7 +967,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1009,7 +1009,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1042,7 +1042,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1156,7 +1156,7 @@ jobs:
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1189,7 +1189,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1310,7 +1310,7 @@ jobs:
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1345,7 +1345,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true
@@ -1485,7 +1485,7 @@ jobs:
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
steps:
- name: Restore apt cache
uses: actions/cache/restore@v4.2.4
uses: actions/cache/restore@v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1518,7 +1518,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: venv
fail-on-cache-miss: true

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:python"

View File

@@ -160,7 +160,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.09.1
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -221,7 +221,7 @@ jobs:
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.07.0
uses: home-assistant/wheels@2025.09.1
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

View File

@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["accuweather"],
"requirements": ["accuweather==4.2.1"]
"requirements": ["accuweather==4.2.2"]
}

View File

@@ -4,10 +4,18 @@ from __future__ import annotations
from airos.airos8 import AirOS8
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
_PLATFORMS: list[Platform] = [
@@ -21,13 +29,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(hass, verify_ssl=False)
session = async_get_clientsession(
hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
)
airos_device = AirOS8(
host=entry.data[CONF_HOST],
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=session,
use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -40,6 +51,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
return True
async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Migrate old config entry."""
if entry.version > 1:
# This means the user has downgraded from a future version
return False
if entry.version == 1 and entry.minor_version == 1:
new_data = {**entry.data}
advanced_data = {
CONF_SSL: DEFAULT_SSL,
CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
}
new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
hass.config_entries.async_update_entry(
entry,
data=new_data,
minor_version=2,
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
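
For context, the new async_migrate_entry above only has to handle version 1.1 entries: it copies the existing entry data, adds an advanced_settings section with the SSL defaults imported from const (shown further down), and bumps the minor version to 2. A rough before/after sketch of the entry data; the host and credential values are illustrative, while the keys and defaults come from this changeset:

# version 1, minor_version 1 - data as created by the old flow (illustrative values)
data = {"host": "192.168.1.20", "username": "ubnt", "password": "..."}

# after async_migrate_entry runs, minor_version becomes 2 and the section is added
data["advanced_settings"] = {"ssl": True, "verify_ssl": False}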

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -14,11 +15,23 @@ from airos.exceptions import (
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.data_entry_flow import section
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DOMAIN
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOS8
_LOGGER = logging.getLogger(__name__)
@@ -28,6 +41,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
vol.Required(CONF_HOST): str,
vol.Required(CONF_USERNAME, default="ubnt"): str,
vol.Required(CONF_PASSWORD): str,
vol.Required(SECTION_ADVANCED_SETTINGS): section(
vol.Schema(
{
vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
}
),
{"collapsed": True},
),
}
)
@@ -36,47 +58,109 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Ubiquiti airOS."""
VERSION = 1
MINOR_VERSION = 2
def __init__(self) -> None:
"""Initialize the config flow."""
super().__init__()
self.airos_device: AirOS8
self.errors: dict[str, str] = {}
async def async_step_user(
self,
user_input: dict[str, Any] | None = None,
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
"""Handle the manual input of host and credentials."""
self.errors = {}
if user_input is not None:
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(self.hass, verify_ssl=False)
airos_device = AirOS8(
host=user_input[CONF_HOST],
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=session,
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
self._abort_if_unique_id_configured()
validated_info = await self._validate_and_get_device_info(user_input)
if validated_info:
return self.async_create_entry(
title=airos_data.host.hostname, data=user_input
title=validated_info["title"],
data=validated_info["data"],
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
)
async def _validate_and_get_device_info(
self, config_data: dict[str, Any]
) -> dict[str, Any] | None:
"""Validate user input with the device API."""
# By default airOS 8 comes with self-signed SSL certificates,
# with no option in the web UI to change or upload a custom certificate.
session = async_get_clientsession(
self.hass,
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
)
airos_device = AirOS8(
host=config_data[CONF_HOST],
username=config_data[CONF_USERNAME],
password=config_data[CONF_PASSWORD],
session=session,
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
)
try:
await airos_device.login()
airos_data = await airos_device.status()
except (
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
):
self.errors["base"] = "cannot_connect"
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
self.errors["base"] = "invalid_auth"
except AirOSKeyDataMissingError:
self.errors["base"] = "key_data_missing"
except Exception:
_LOGGER.exception("Unexpected exception during credential validation")
self.errors["base"] = "unknown"
else:
await self.async_set_unique_id(airos_data.derived.mac)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return {"title": airos_data.host.hostname, "data": config_data}
return None
async def async_step_reauth(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
return await self.async_step_reauth_confirm(user_input)
async def async_step_reauth_confirm(
self,
user_input: Mapping[str, Any],
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
self.errors = {}
if user_input:
validate_data = {**self._get_reauth_entry().data, **user_input}
if await self._validate_and_get_device_info(config_data=validate_data):
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates=validate_data,
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD,
autocomplete="current-password",
)
),
}
),
errors=self.errors,
)

View File

@@ -7,3 +7,8 @@ DOMAIN = "airos"
SCAN_INTERVAL = timedelta(minutes=1)
MANUFACTURER = "Ubiquiti"
DEFAULT_VERIFY_SSL = False
DEFAULT_SSL = True
SECTION_ADVANCED_SETTINGS = "advanced_settings"

View File

@@ -14,7 +14,7 @@ from airos.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except (AirOSConnectionAuthenticationError,) as err:
except AirOSConnectionAuthenticationError as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (

View File

@@ -2,11 +2,11 @@
from __future__ import annotations
from homeassistant.const import CONF_HOST
from homeassistant.const import CONF_HOST, CONF_SSL
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
from .coordinator import AirOSDataUpdateCoordinator
@@ -20,9 +20,14 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
super().__init__(coordinator)
airos_data = self.coordinator.data
url_schema = (
"https"
if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
else "http"
)
configuration_url: str | None = (
f"https://{coordinator.config_entry.data[CONF_HOST]}"
f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
)
self._attr_device_info = DeviceInfo(

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.1"]
"requirements": ["airos==0.5.3"]
}

View File

@@ -2,6 +2,14 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
@@ -12,6 +20,18 @@
"host": "IP address or hostname of the airOS device",
"username": "Administrator username for the airOS device, normally 'ubnt'",
"password": "Password configured through the UISP app or web interface"
},
"sections": {
"advanced_settings": {
"data": {
"ssl": "Use HTTPS",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"ssl": "Whether the connection should be encrypted (required for most devices)",
"verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
}
}
}
}
},
@@ -22,7 +42,9 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
}
},
"entity": {

View File

@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
from aioamazondevices.const import SENSOR_STATE_OFF
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
@@ -20,6 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -31,6 +33,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
is_on_fn: Callable[[AmazonDevice, str], bool]
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True
BINARY_SENSORS: Final = (
@@ -41,46 +44,15 @@ BINARY_SENSORS: Final = (
is_on_fn=lambda device, _: device.online,
),
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, _: device.bluetooth_state,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="humanPresenceDetectionState",
key="detectionState",
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_supported=lambda device, key: device.sensors.get(key) is not None,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
is_on_fn=lambda device, key: bool(
device.sensors[key].value != SENSOR_STATE_OFF
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online and device.sensors[key].error is False
),
),
)
@@ -94,6 +66,15 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
coordinator,
BINARY_SENSOR_DOMAIN,
"humanPresenceDetectionState",
"detectionState",
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -125,3 +106,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
return self.entity_description.is_on_fn(
self.device, self.entity_description.key
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
data = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
)
except CannotConnect:
errors["base"] = "cannot_connect"
except (CannotAuthenticate, TypeError):
except CannotAuthenticate:
errors["base"] = "invalid_auth"
except CannotRetrieveData:
errors["base"] = "cannot_retrieve_data"

View File

@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
translation_key="cannot_retrieve_data_with_error",
translation_placeholders={"error": repr(err)},
) from err
except (CannotAuthenticate, TypeError) as err:
except CannotAuthenticate as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",

View File

@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
"online": device.online,
"serial number": device.serial_number,
"software version": device.software_version,
"do not disturb": device.do_not_disturb,
"response style": device.response_style,
"bluetooth state": device.bluetooth_state,
"sensors": device.sensors,
}

View File

@@ -1,44 +1,4 @@
{
"entity": {
"binary_sensor": {
"bluetooth": {
"default": "mdi:bluetooth-off",
"state": {
"on": "mdi:bluetooth"
}
},
"baby_cry_detection": {
"default": "mdi:account-voice-off",
"state": {
"on": "mdi:account-voice"
}
},
"beeping_appliance_detection": {
"default": "mdi:bell-off",
"state": {
"on": "mdi:bell-ring"
}
},
"cough_detection": {
"default": "mdi:blur-off",
"state": {
"on": "mdi:blur"
}
},
"dog_bark_detection": {
"default": "mdi:dog-side-off",
"state": {
"on": "mdi:dog-side"
}
},
"water_sounds_detection": {
"default": "mdi:water-pump-off",
"state": {
"on": "mdi:water-pump"
}
}
}
},
"services": {
"send_sound": {
"service": "mdi:cast-audio"

View File

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "silver",
"requirements": ["aioamazondevices==6.0.0"]
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
}

View File

@@ -31,6 +31,9 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
"""Amazon Devices sensor entity description."""
native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
SENSORS: Final = (
@@ -99,3 +102,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.device.sensors[self.entity_description.key].value
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -58,26 +58,6 @@
}
},
"entity": {
"binary_sensor": {
"bluetooth": {
"name": "Bluetooth"
},
"baby_cry_detection": {
"name": "Baby crying"
},
"beeping_appliance_detection": {
"name": "Beeping appliance"
},
"cough_detection": {
"name": "Coughing"
},
"dog_bark_detection": {
"name": "Dog barking"
},
"water_sounds_detection": {
"name": "Water sounds"
}
},
"notify": {
"speak": {
"name": "Speak"

View File

@@ -8,13 +8,17 @@ from typing import TYPE_CHECKING, Any, Final
from aioamazondevices.api import AmazonDevice
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SwitchEntity,
SwitchEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import alexa_api_call
from .utils import alexa_api_call, async_update_unique_id
PARALLEL_UPDATES = 1
@@ -24,16 +28,17 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
"""Alexa Devices switch entity description."""
is_on_fn: Callable[[AmazonDevice], bool]
subkey: str
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online and device.sensors[key].error is False
)
method: str
SWITCHES: Final = (
AmazonSwitchEntityDescription(
key="do_not_disturb",
subkey="AUDIO_PLAYER",
key="dnd",
translation_key="do_not_disturb",
is_on_fn=lambda _device: _device.do_not_disturb,
is_on_fn=lambda device: bool(device.sensors["dnd"].value),
method="set_do_not_disturb",
),
)
@@ -48,6 +53,11 @@ async def async_setup_entry(
coordinator = entry.runtime_data
# Replace unique id for "DND" switch and remove from Speaker Group
await async_update_unique_id(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)
known_devices: set[str] = set()
def _check_device() -> None:
@@ -59,7 +69,7 @@ async def async_setup_entry(
AmazonSwitchEntity(coordinator, serial_num, switch_desc)
for switch_desc in SWITCHES
for serial_num in new_devices
if switch_desc.subkey in coordinator.data[serial_num].capabilities
if switch_desc.key in coordinator.data[serial_num].sensors
)
_check_device()
@@ -94,3 +104,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
def is_on(self) -> bool:
"""Return True if switch is on."""
return self.entity_description.is_on_fn(self.device)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
self.entity_description.is_available_fn(
self.device, self.entity_description.key
)
and super().available
)

View File

@@ -6,9 +6,12 @@ from typing import Any, Concatenate
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
from .const import DOMAIN
from .const import _LOGGER, DOMAIN
from .coordinator import AmazonDevicesCoordinator
from .entity import AmazonEntity
@@ -38,3 +41,23 @@ def alexa_api_call[_T: AmazonEntity, **_P](
) from err
return cmd_wrapper
async def async_update_unique_id(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
domain: str,
old_key: str,
new_key: str,
) -> None:
"""Update unique id for entities created with old format."""
entity_registry = er.async_get(hass)
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{old_key}"
if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
_LOGGER.debug("Updating unique_id for %s", entity_id)
new_unique_id = unique_id.replace(old_key, new_key)
# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)

View File

@@ -551,7 +551,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
for domain, integration_info in integration_inputs.items()
if (integration := integrations.get(domain)) is not None
and integration.is_built_in
and integration.integration_type in ("device", "hub")
and integration.manifest.get("integration_type") in ("device", "hub")
}
# Call integrations that implement the analytics platform
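
A note on the one-line analytics change above; the fallback value is an assumption about the loader, not something shown in this diff. The integration_type attribute appears to fall back to a default when the manifest omits the key, whereas reading the manifest directly yields None for such integrations, which the ("device", "hub") membership test then filters out, in line with the commit "Filter out empty integration type in extended analytics".

# Rough illustration of the difference (assumed fallback behaviour)
manifest = {}  # integration without an explicit integration_type
manifest.get("integration_type") in ("device", "hub")         # False -> excluded
manifest.get("integration_type", "hub") in ("device", "hub")  # True  -> previously counted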

View File

@@ -1308,7 +1308,9 @@ class PipelineRun:
# instead of a full response.
all_targets_in_satellite_area = (
self._get_all_targets_in_satellite_area(
conversation_result.response, self._device_id
conversation_result.response,
self._satellite_id,
self._device_id,
)
)
@@ -1337,39 +1339,62 @@ class PipelineRun:
return (speech, all_targets_in_satellite_area)
def _get_all_targets_in_satellite_area(
self, intent_response: intent.IntentResponse, device_id: str | None
self,
intent_response: intent.IntentResponse,
satellite_id: str | None,
device_id: str | None,
) -> bool:
"""Return true if all targeted entities were in the same area as the device."""
if (
(intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
or (not intent_response.matched_states)
or (not device_id)
):
return False
device_registry = dr.async_get(self.hass)
if (not (device := device_registry.async_get(device_id))) or (
not device.area_id
intent_response.response_type != intent.IntentResponseType.ACTION_DONE
or not intent_response.matched_states
):
return False
entity_registry = er.async_get(self.hass)
for state in intent_response.matched_states:
entity = entity_registry.async_get(state.entity_id)
if not entity:
device_registry = dr.async_get(self.hass)
area_id: str | None = None
if (
satellite_id is not None
and (target_entity_entry := entity_registry.async_get(satellite_id))
is not None
):
area_id = target_entity_entry.area_id
device_id = target_entity_entry.device_id
if area_id is None:
if device_id is None:
return False
if (entity_area_id := entity.area_id) is None:
if (entity.device_id is None) or (
(entity_device := device_registry.async_get(entity.device_id))
is None
):
device_entry = device_registry.async_get(device_id)
if device_entry is None:
return False
area_id = device_entry.area_id
if area_id is None:
return False
for state in intent_response.matched_states:
target_entity_entry = entity_registry.async_get(state.entity_id)
if target_entity_entry is None:
return False
target_area_id = target_entity_entry.area_id
if target_area_id is None:
if target_entity_entry.device_id is None:
return False
entity_area_id = entity_device.area_id
target_device_entry = device_registry.async_get(
target_entity_entry.device_id
)
if target_device_entry is None:
return False
if entity_area_id != device.area_id:
target_area_id = target_device_entry.area_id
if target_area_id != area_id:
return False
return True

View File

@@ -3,16 +3,12 @@ beolink_allstandby:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_expand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
all_discovered:
required: false
@@ -37,8 +33,6 @@ beolink_join:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false
@@ -71,16 +65,12 @@ beolink_leave:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
beolink_unexpand:
target:
entity:
integration: bang_olufsen
domain: media_player
device:
integration: bang_olufsen
fields:
jid_options:
collapsed: false

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.1"],
"requirements": ["hass-nabucasa==1.1.2"],
"single_config_entry": true
}

View File

@@ -25,23 +25,27 @@ from .const import _LOGGER, DEFAULT_PORT, DEVICE_TYPE_LIST, DOMAIN
from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = 111111
DEFAULT_PIN = "111111"
pin_regex = r"^[0-9]{4,10}$"
USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.positive_int})
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.positive_int,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
}
)
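
The switch from cv.positive_int to a string matched against pin_regex is what addresses the "PIN failure if starting with 0" commit in the list above: parsing the PIN as an integer drops leading zeros, so the value presumably no longer matches what the device expects. A minimal standalone illustration, not taken from the integration itself:

import re

pin = "012345"
assert str(int(pin)) == "12345"             # int-based validation loses the leading zero
assert re.fullmatch(r"^[0-9]{4,10}$", pin)  # the new regex keeps the PIN as entered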

View File

@@ -7,6 +7,6 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "silver",
"quality_scale": "platinum",
"requirements": ["aiocomelit==0.12.3"]
}

View File

@@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SOURCE: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_handle_source_entity_changes(
@@ -46,15 +47,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))

View File

@@ -140,6 +140,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1
MINOR_VERSION = 4

View File

@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol
import voluptuous as vol
from homeassistant.const import CONF_DOMAIN
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
class DeviceCondition(Condition):
"""Device condition."""
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
"""Initialize condition."""
self._config = config
self._hass = hass
_hass: HomeAssistant
_config: ConfigType
@classmethod
async def async_validate_complete_config(
cls, hass: HomeAssistant, complete_config: ConfigType
) -> ConfigType:
"""Validate complete config."""
complete_config = await async_validate_device_automation_config(
hass,
complete_config,
cv.DEVICE_CONDITION_SCHEMA,
DeviceAutomationType.CONDITION,
)
# Since we don't want to migrate device conditions to a new format
# we just pass the entire config as options.
complete_config[CONF_OPTIONS] = complete_config.copy()
return complete_config
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate device condition config."""
return await async_validate_device_automation_config(
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
)
"""Validate config.
This is here just to satisfy the abstract class interface. It is never called.
"""
raise NotImplementedError
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
assert config.options is not None
self._config = config.options
async def async_get_checker(self) -> condition.ConditionCheckerType:
"""Test a device condition."""

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from datetime import timedelta
from ipaddress import IPv4Address, IPv6Address
import logging
@@ -88,8 +89,8 @@ class WanIpSensor(SensorEntity):
self._attr_name = "IPv6" if ipv6 else None
self._attr_unique_id = f"{hostname}_{ipv6}"
self.hostname = hostname
self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
self.resolver.nameservers = [resolver]
self.port = port
self._resolver = resolver
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
self._retries = DEFAULT_RETRIES
self._attr_extra_state_attributes = {
@@ -103,14 +104,26 @@ class WanIpSensor(SensorEntity):
model=aiodns.__version__,
name=name,
)
self.resolver: aiodns.DNSResolver
self.create_dns_resolver()
def create_dns_resolver(self) -> None:
"""Create the DNS resolver."""
self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port)
self.resolver.nameservers = [self._resolver]
async def async_update(self) -> None:
"""Get the current DNS IP address for hostname."""
if self.resolver._closed: # noqa: SLF001
self.create_dns_resolver()
response = None
try:
response = await self.resolver.query(self.hostname, self.querytype)
async with asyncio.timeout(10):
response = await self.resolver.query(self.hostname, self.querytype)
except TimeoutError:
await self.resolver.close()
except DNSError as err:
_LOGGER.warning("Exception while resolving host: %s", err)
response = None
if response:
sorted_ips = sort_ips(

View File
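The dnsip hunk above wraps the aiodns query in asyncio.timeout and rebuilds the resolver once a timeout has closed it. A condensed sketch of that pattern using only the calls visible in the diff; nameserver, port, and hostname values are placeholders:

import asyncio
import aiodns

class WanIpProbe:
    """Minimal sketch of the recreate-resolver-on-timeout pattern."""

    def __init__(self, nameserver: str, port: int = 53) -> None:
        self._nameserver = nameserver
        self._port = port
        self.create_dns_resolver()

    def create_dns_resolver(self) -> None:
        self.resolver = aiodns.DNSResolver(tcp_port=self._port, udp_port=self._port)
        self.resolver.nameservers = [self._nameserver]

    async def query(self, hostname: str) -> list:
        if self.resolver._closed:  # noqa: SLF001 - closed by a previous timeout
            self.create_dns_resolver()
        try:
            async with asyncio.timeout(10):
                return await self.resolver.query(hostname, "A")
        except TimeoutError:
            # Drop the stale connection; the next call recreates the resolver.
            await self.resolver.close()
            return []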

@@ -116,7 +116,11 @@ class EbusdData:
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.write(self._address, self._circuit, name, value)
if command_result is not None and "done" not in command_result:
if (
command_result is not None
and "done" not in command_result
and "empty" not in command_result
):
_LOGGER.warning("Write command failed: %s", name)
except RuntimeError as err:
_LOGGER.error(err)

View File

@@ -69,7 +69,9 @@ class EcovacsMap(
await super().async_added_to_hass()
async def on_info(event: CachedMapInfoEvent) -> None:
self._attr_extra_state_attributes["map_name"] = event.name
for map_obj in event.maps:
if map_obj.using:
self._attr_extra_state_attributes["map_name"] = map_obj.name
async def on_changed(event: MapChangedEvent) -> None:
self._attr_image_last_updated = event.when

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==14.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==15.0.0"]
}

View File

@@ -3,14 +3,15 @@
from __future__ import annotations
from datetime import timedelta
from enum import IntEnum
import logging
from typing import Any
from pyephember2.pyephember2 import (
EphEmber,
ZoneMode,
boiler_state,
zone_current_temperature,
zone_is_active,
zone_is_hotwater,
zone_mode,
zone_name,
@@ -53,6 +54,15 @@ EPH_TO_HA_STATE = {
"OFF": HVACMode.OFF,
}
class EPHBoilerStates(IntEnum):
"""Boiler states for a zone given by the api."""
FIXME = 0
OFF = 1
ON = 2
HA_STATE_TO_EPH = {value: key for key, value in EPH_TO_HA_STATE.items()}
@@ -123,7 +133,7 @@ class EphEmberThermostat(ClimateEntity):
@property
def hvac_action(self) -> HVACAction:
"""Return current HVAC action."""
if zone_is_active(self._zone):
if boiler_state(self._zone) == EPHBoilerStates.ON:
return HVACAction.HEATING
return HVACAction.IDLE

View File

@@ -57,6 +57,7 @@ from .manager import async_replace_device
ERROR_REQUIRES_ENCRYPTION_KEY = "requires_encryption_key"
ERROR_INVALID_ENCRYPTION_KEY = "invalid_psk"
ERROR_INVALID_PASSWORD_AUTH = "invalid_auth"
_LOGGER = logging.getLogger(__name__)
ZERO_NOISE_PSK = "MDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDA="
@@ -137,6 +138,11 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
self._password = ""
return await self._async_authenticate_or_add()
if error == ERROR_INVALID_PASSWORD_AUTH or (
error is None and self._device_info and self._device_info.uses_password
):
return await self.async_step_authenticate()
if error is None and entry_data.get(CONF_NOISE_PSK):
# Device was configured with encryption but now connects without it.
# Check if it's the same device before offering to remove encryption.
@@ -690,13 +696,15 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
cli = APIClient(
host,
port or DEFAULT_PORT,
"",
self._password or "",
zeroconf_instance=zeroconf_instance,
noise_psk=noise_psk,
)
try:
await cli.connect()
self._device_info = await cli.device_info()
except InvalidAuthAPIError:
return ERROR_INVALID_PASSWORD_AUTH
except RequiresEncryptionAPIError:
return ERROR_REQUIRES_ENCRYPTION_KEY
except InvalidEncryptionKeyAPIError as ex:

View File

@@ -372,6 +372,9 @@ class ESPHomeManager:
"""Subscribe to states and list entities on successful API login."""
try:
await self._on_connect()
except InvalidAuthAPIError as err:
_LOGGER.warning("Authentication failed for %s: %s", self.host, err)
await self._start_reauth_and_disconnect()
except APIConnectionError as err:
_LOGGER.warning(
"Error getting setting up connection for %s: %s", self.host, err
@@ -641,7 +644,14 @@ class ESPHomeManager:
if self.reconnect_logic:
await self.reconnect_logic.stop()
return
await self._start_reauth_and_disconnect()
async def _start_reauth_and_disconnect(self) -> None:
"""Start reauth flow and stop reconnection attempts."""
self.entry.async_start_reauth(self.hass)
await self.cli.disconnect()
if self.reconnect_logic:
await self.reconnect_logic.stop()
async def _handle_dynamic_encryption_key(
self, device_info: EsphomeDeviceInfo
@@ -1063,7 +1073,7 @@ def _async_register_service(
service_name,
{
"description": (
f"Calls the service {service.name} of the node {device_info.name}"
f"Performs the action {service.name} of the node {device_info.name}"
),
"fields": fields,
},

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==41.9.0",
"aioesphomeapi==41.11.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.3.0"
],

View File

@@ -26,11 +26,14 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
super().__init__(coordinator)
self._serial = serial
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
connections=connections,
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],
@@ -62,11 +65,14 @@ class EzvizBaseEntity(Entity):
self._serial = serial
self.coordinator = coordinator
self._camera_name = self.data["name"]
connections = set()
if mac_address := self.data["mac_address"]:
connections.add((CONNECTION_NETWORK_MAC, mac_address))
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial)},
connections={
(CONNECTION_NETWORK_MAC, self.data["mac_address"]),
},
connections=connections,
manufacturer=MANUFACTURER,
model=self.data["device_sub_category"],
name=self.data["name"],

View File

@@ -10,7 +10,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Filter from a config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -18,8 +17,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Filter config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -246,6 +246,7 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -46,6 +46,9 @@ async def async_get_config_entry_diagnostics(
}
for _, device in avm_wrapper.devices.items()
],
"cpu_temperatures": await hass.async_add_executor_job(
avm_wrapper.fritz_status.get_cpu_temperatures
),
"wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
},
}

View File

@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20250924.0"]
"requirements": ["home-assistant-frontend==20250926.0"]
}

View File

@@ -1,8 +1,10 @@
load_url:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
url:
example: "https://home-assistant.io"
required: true
@@ -10,10 +12,12 @@ load_url:
text:
set_config:
target:
device:
integration: fully_kiosk
fields:
device_id:
required: true
selector:
device:
integration: fully_kiosk
key:
example: "motionSensitivity"
required: true
@@ -26,12 +30,14 @@ set_config:
text:
start_application:
target:
device:
integration: fully_kiosk
fields:
application:
example: "de.ozerov.fully"
required: true
selector:
text:
device_id:
required: true
selector:
device:
integration: fully_kiosk

View File

@@ -147,6 +147,10 @@
"name": "Load URL",
"description": "Loads a URL on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "Device ID",
"description": "The target device for this action."
},
"url": {
"name": "[%key:common::config_flow::data::url%]",
"description": "URL to load."
@@ -157,6 +161,10 @@
"name": "Set configuration",
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
"fields": {
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
},
"key": {
"name": "Key",
"description": "Configuration parameter to set."
@@ -174,6 +182,10 @@
"application": {
"name": "Application",
"description": "Package name of the application to start."
},
"device_id": {
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
}
}
}

View File

@@ -108,6 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidifier,
@@ -140,6 +141,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -148,7 +150,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -186,11 +187,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -96,6 +96,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_HEATER: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the heater, but
@@ -67,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_SENSOR: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_track_entity_registry_updated_event(
@@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
@@ -113,11 +114,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -104,6 +104,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -77,10 +77,10 @@ class GeniusDevice(GeniusEntity):
async def async_update(self) -> None:
"""Update an entity's state data."""
if "_state" in self._device.data: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(
self._device.data["_state"]["lastComms"]
)
if (state := self._device.data.get("_state")) and (
last_comms := state.get("lastComms")
) is not None: # only via v3 API
self._last_comms = dt_util.utc_from_timestamp(last_comms)
class GeniusZone(GeniusEntity):

View File

@@ -1,7 +1,5 @@
set_vacation:
target:
device:
integration: google_mail
entity:
integration: google_mail
fields:

View File

@@ -141,15 +141,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
await hass.config_entries.async_forward_entry_setups(
entry, (entry.options["group_type"],)
)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
return True
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(

View File

@@ -329,6 +329,7 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
@callback
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:

View File

@@ -65,6 +65,7 @@ async def async_setup_entry(
entry,
options={**entry.options, CONF_ENTITY_ID: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
async def source_entity_removed() -> None:
# The source entity has been removed, we remove the config entry because
@@ -86,7 +87,6 @@ async def async_setup_entry(
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -130,8 +130,3 @@ async def async_unload_entry(
) -> bool:
"""Unload History stats config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -162,6 +162,7 @@ class HistoryStatsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.19.0"],
"requirements": ["aiohomeconnect==0.20.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -32,15 +32,12 @@ set_location:
stop:
toggle:
target:
entity: {}
turn_on:
target:
entity: {}
turn_off:
target:
entity: {}
update_entity:
fields:
@@ -53,8 +50,6 @@ update_entity:
reload_custom_templates:
reload_config_entry:
target:
entity: {}
device: {}
fields:
entry_id:
advanced: true

View File

@@ -145,7 +145,11 @@ class HueMotionSensor(HueBaseEntity, BinarySensorEntity):
if not self.resource.enabled:
# Force None (unknown) if the sensor is set to disabled in Hue
return None
return self.resource.motion.value
if not (motion_feature := self.resource.motion):
return None
if motion_feature.motion_report is not None:
return motion_feature.motion_report.motion
return motion_feature.motion
# pylint: disable-next=hass-enforce-class-module

View File

@@ -3,7 +3,9 @@
from __future__ import annotations
import asyncio
from email.message import Message
import logging
from typing import Any
from aioimaplib import IMAP4_SSL, AioImapException, Response
import voluptuous as vol
@@ -33,6 +35,7 @@ from .coordinator import (
ImapPollingDataUpdateCoordinator,
ImapPushDataUpdateCoordinator,
connect_to_server,
get_parts,
)
from .errors import InvalidAuth, InvalidFolder
@@ -40,6 +43,7 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
CONF_ENTRY = "entry"
CONF_SEEN = "seen"
CONF_PART = "part"
CONF_UID = "uid"
CONF_TARGET_FOLDER = "target_folder"
@@ -64,6 +68,11 @@ SERVICE_MOVE_SCHEMA = _SERVICE_UID_SCHEMA.extend(
)
SERVICE_DELETE_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_TEXT_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_PART_SCHEMA = _SERVICE_UID_SCHEMA.extend(
{
vol.Required(CONF_PART): cv.string,
}
)
type ImapConfigEntry = ConfigEntry[ImapDataUpdateCoordinator]
@@ -216,12 +225,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
return {
"text": message.text,
"sender": message.sender,
"subject": message.subject,
"parts": get_parts(message.email_message),
"uid": uid,
}
@@ -233,6 +244,73 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
supports_response=SupportsResponse.ONLY,
)
async def async_fetch_part(call: ServiceCall) -> ServiceResponse:
"""Process fetch email part service and return content."""
@callback
def get_message_part(message: Message, part_key: str) -> Message:
part: Message | Any = message
for index in part_key.split(","):
sub_parts = part.get_payload()
try:
assert isinstance(sub_parts, list)
part = sub_parts[int(index)]
except (AssertionError, ValueError, IndexError) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc
return part
entry_id: str = call.data[CONF_ENTRY]
uid: str = call.data[CONF_UID]
part_key: str = call.data[CONF_PART]
_LOGGER.debug(
"Fetch part %s for message %s. Entry: %s",
part_key,
uid,
entry_id,
)
client = await async_get_imap_client(hass, entry_id)
try:
response = await client.fetch(uid, "BODY.PEEK[]")
except (TimeoutError, AioImapException) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="imap_server_fail",
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
part_data = get_message_part(message.email_message, part_key)
part_data_content = part_data.get_payload(decode=False)
try:
assert isinstance(part_data_content, str)
except AssertionError as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc
return {
"part_data": part_data_content,
"content_type": part_data.get_content_type(),
"content_transfer_encoding": part_data.get("Content-Transfer-Encoding"),
"filename": part_data.get_filename(),
"part": part_key,
"uid": uid,
}
hass.services.async_register(
DOMAIN,
"fetch_part",
async_fetch_part,
SERVICE_FETCH_PART_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
return True
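The new fetch_part service addresses nested MIME parts with comma-separated indices, the same "0,1" style keys that get_parts (added in coordinator.py below) reports. A standalone example with the stdlib email package showing how those indices map onto a multipart message; the message contents are invented:

from email.message import Message
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# multipart/mixed -> [multipart/alternative -> [plain, html], csv attachment]
alternative = MIMEMultipart("alternative")
alternative.attach(MIMEText("plain body", "plain"))
alternative.attach(MIMEText("<b>html body</b>", "html"))
outer = MIMEMultipart("mixed")
outer.attach(alternative)
attachment = MIMEText("col1;col2\n1;2", "csv")
attachment.add_header("Content-Disposition", "attachment", filename="data.csv")
outer.attach(attachment)

def get_message_part(message: Message, part_key: str) -> Message:
    """Walk nested payloads by comma-separated indices, e.g. '0,1'."""
    part = message
    for index in part_key.split(","):
        part = part.get_payload()[int(index)]
    return part

print(get_message_part(outer, "0,1").get_content_type())  # text/html
print(get_message_part(outer, "1").get_filename())        # data.csv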

View File

@@ -21,7 +21,7 @@ from homeassistant.const import (
CONF_VERIFY_SSL,
CONTENT_TYPE_TEXT_PLAIN,
)
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
@@ -209,6 +209,28 @@ class ImapMessage:
return str(self.email_message.get_payload())
@callback
def get_parts(message: Message, prefix: str | None = None) -> dict[str, Any]:
"""Return information about the parts of a multipart message."""
parts: dict[str, Any] = {}
if not message.is_multipart():
return {}
for index, part in enumerate(message.get_payload(), 0):
if TYPE_CHECKING:
assert isinstance(part, Message)
key = f"{prefix},{index}" if prefix else f"{index}"
if part.is_multipart():
parts |= get_parts(part, key)
continue
parts[key] = {"content_type": part.get_content_type()}
if filename := part.get_filename():
parts[key]["filename"] = filename
if content_transfer_encoding := part.get("Content-Transfer-Encoding"):
parts[key]["content_transfer_encoding"] = content_transfer_encoding
return parts
class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
"""Base class for imap client."""
@@ -275,6 +297,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
"sender": message.sender,
"subject": message.subject,
"uid": last_message_uid,
"parts": get_parts(message.email_message),
}
data.update({key: getattr(message, key) for key in self._event_data_keys})
if self.custom_event_template is not None:

View File

@@ -21,6 +21,9 @@
},
"fetch": {
"service": "mdi:email-sync-outline"
},
"fetch_part": {
"service": "mdi:email-sync-outline"
}
}
}

View File

@@ -56,3 +56,22 @@ fetch:
example: "12"
selector:
text:
fetch_part:
fields:
entry:
required: true
selector:
config_entry:
integration: "imap"
uid:
required: true
example: "12"
selector:
text:
part:
required: true
example: "0,1"
selector:
text:

View File

@@ -84,6 +84,9 @@
"imap_server_fail": {
"message": "The IMAP server failed to connect: {error}."
},
"invalid_part_index": {
"message": "Invalid part index."
},
"seen_failed": {
"message": "Marking message as seen failed with \"{error}\"."
}
@@ -148,6 +151,24 @@
}
}
},
"fetch_part": {
"name": "Fetch message part",
"description": "Fetches a message part or attachment from an email message.",
"fields": {
"entry": {
"name": "[%key:component::imap::services::fetch::fields::entry::name%]",
"description": "[%key:component::imap::services::fetch::fields::entry::description%]"
},
"uid": {
"name": "[%key:component::imap::services::fetch::fields::uid::name%]",
"description": "[%key:component::imap::services::fetch::fields::uid::description%]"
},
"part": {
"name": "Part",
"description": "The message part index."
}
}
},
"seen": {
"name": "Mark message as seen",
"description": "Marks an email as seen.",

View File

@@ -142,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
)
coordinators = LaMarzoccoRuntimeData(
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),

View File

@@ -8,7 +8,7 @@ from datetime import timedelta
import logging
from typing import Any
from pylamarzocco import LaMarzoccoMachine
from pylamarzocco import LaMarzoccoCloudClient, LaMarzoccoMachine
from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful
from homeassistant.config_entries import ConfigEntry
@@ -19,7 +19,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import DOMAIN
SCAN_INTERVAL = timedelta(seconds=15)
SCAN_INTERVAL = timedelta(seconds=60)
SETTINGS_UPDATE_INTERVAL = timedelta(hours=8)
SCHEDULE_UPDATE_INTERVAL = timedelta(minutes=30)
STATISTICS_UPDATE_INTERVAL = timedelta(minutes=15)
@@ -51,6 +51,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
hass: HomeAssistant,
entry: LaMarzoccoConfigEntry,
device: LaMarzoccoMachine,
cloud_client: LaMarzoccoCloudClient | None = None,
) -> None:
"""Initialize coordinator."""
super().__init__(
@@ -61,6 +62,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
update_interval=self._default_update_interval,
)
self.device = device
self.cloud_client = cloud_client
async def _async_update_data(self) -> None:
"""Do the data update."""
@@ -85,11 +87,17 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]):
class LaMarzoccoConfigUpdateCoordinator(LaMarzoccoUpdateCoordinator):
"""Class to handle fetching data from the La Marzocco API centrally."""
cloud_client: LaMarzoccoCloudClient
async def _internal_async_update_data(self) -> None:
"""Fetch data from API endpoint."""
# ensure token stays valid; does nothing if token is still valid
await self.cloud_client.async_get_access_token()
if self.device.websocket.connected:
return
await self.device.get_dashboard()
_LOGGER.debug("Current status: %s", self.device.dashboard.to_dict())

View File

@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.0"]
"requirements": ["pylamarzocco==2.1.1"]
}

View File

@@ -12,7 +12,7 @@ from homeassistant.components.number import (
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import PRECISION_WHOLE, EntityCategory
from homeassistant.const import PRECISION_WHOLE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -72,6 +72,7 @@ NUMBERS: tuple[LetPotNumberEntityDescription, ...] = (
LetPotNumberEntityDescription(
key="plant_days",
translation_key="plant_days",
native_unit_of_measurement=UnitOfTime.DAYS,
value_fn=lambda coordinator: coordinator.data.plant_days,
set_value_fn=(
lambda device_client, serial, value: device_client.set_plant_days(

View File

@@ -54,8 +54,7 @@
"name": "Light brightness"
},
"plant_days": {
"name": "Plants age",
"unit_of_measurement": "days"
"name": "Plants age"
}
},
"select": {

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/libre_hardware_monitor",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.3.1"]
"requirements": ["librehardwaremonitor-api==1.4.0"]
}

View File

@@ -25,5 +25,10 @@
"turn_on": {
"service": "mdi:lightbulb-on"
}
},
"triggers": {
"state": {
"trigger": "mdi:state-machine"
}
}
}

View File

@@ -133,6 +133,13 @@
}
},
"selector": {
"behavior": {
"options": {
"first": "First",
"last": "Last",
"any": "Any"
}
},
"color_name": {
"options": {
"homeassistant": "Home Assistant",
@@ -290,6 +297,12 @@
"short": "Short",
"long": "Long"
}
},
"state": {
"options": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
},
"services": {
@@ -462,5 +475,22 @@
}
}
}
},
"triggers": {
"state": {
"name": "State",
"description": "When the state of a light changes, such as turning on or off.",
"description_configured": "When the state of a light changes",
"fields": {
"state": {
"name": "State",
"description": "The state to trigger on."
},
"behavior": {
"name": "Behavior",
"description": "The behavior of the targeted entities to trigger on."
}
}
}
}
}

View File

@@ -0,0 +1,165 @@
"""Provides triggers for lights."""
from typing import Final, cast, override
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_PLATFORM,
CONF_STATE,
CONF_TARGET,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HomeAssistant,
callback,
split_entity_id,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import process_state_match
from homeassistant.helpers.target import (
TargetStateChangedData,
async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionType, TriggerInfo
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
# remove when #151314 is merged
CONF_OPTIONS: Final = "options"
ATTR_BEHAVIOR: Final = "behavior"
BEHAVIOR_FIRST: Final = "first"
BEHAVIOR_LAST: Final = "last"
BEHAVIOR_ANY: Final = "any"
STATE_PLATFORM_TYPE: Final = "state"
STATE_TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_OPTIONS): {
vol.Required(CONF_STATE): vol.In([STATE_ON, STATE_OFF]),
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
[BEHAVIOR_FIRST, BEHAVIOR_LAST, BEHAVIOR_ANY]
),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)
class StateTrigger(Trigger):
"""Trigger for state changes."""
@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, STATE_TRIGGER_SCHEMA(config))
def __init__(self, hass: HomeAssistant, config: dict) -> None:
"""Initialize the state trigger."""
self.hass = hass
self.config = config
@override
async def async_attach(
self, action: TriggerActionType, trigger_info: TriggerInfo
) -> CALLBACK_TYPE:
"""Attach the trigger."""
job = HassJob(action, f"light state trigger {trigger_info}")
trigger_data = trigger_info["trigger_data"]
config_options = self.config[CONF_OPTIONS]
match_config_state = process_state_match(config_options.get(CONF_STATE))
def check_all_match(entity_ids: set[str]) -> bool:
"""Check if all entity states match."""
return all(
match_config_state(state.state)
for entity_id in entity_ids
if (state := self.hass.states.get(entity_id)) is not None
)
def check_one_match(entity_ids: set[str]) -> bool:
"""Check that only one entity state matches."""
return (
sum(
match_config_state(state.state)
for entity_id in entity_ids
if (state := self.hass.states.get(entity_id)) is not None
)
== 1
)
behavior = config_options.get(ATTR_BEHAVIOR)
@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]
if to_state is None:
return
# This check is required for "first" behavior, to check that it went from zero
# entities matching the state to one. Otherwise, if previously there were two
# entities on CONF_STATE and one changed, this would trigger.
# For "last" behavior it is not required, but serves as a quicker fail check.
if not match_config_state(to_state.state):
return
if behavior == BEHAVIOR_LAST:
if not check_all_match(target_state_change_data.targeted_entity_ids):
return
elif behavior == BEHAVIOR_FIRST:
if not check_one_match(target_state_change_data.targeted_entity_ids):
return
self.hass.async_run_hass_job(
job,
{
"trigger": {
**trigger_data,
CONF_PLATFORM: self.config[CONF_PLATFORM],
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
"description": f"state of {entity_id}",
}
},
event.context,
)
def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}
target_config = self.config[CONF_TARGET]
return async_track_target_selector_state_change_event(
self.hass, target_config, state_change_listener, entity_filter
)
TRIGGERS: dict[str, type[Trigger]] = {
STATE_PLATFORM_TYPE: StateTrigger,
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for lights."""
return TRIGGERS
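The schema above requires an options block (state plus behavior) and a target. A sketch of the shape it accepts, written as a plain Python dict; the entity IDs are illustrative, and the surrounding automation framework supplies the trigger/platform key:

# Config shape validated by STATE_TRIGGER_SCHEMA above.
example_trigger_config = {
    "options": {
        "state": "on",
        "behavior": "first",  # fire when the first targeted light turns on
    },
    "target": {
        "entity_id": ["light.kitchen", "light.hallway"],  # illustrative entity IDs
    },
}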

View File

@@ -0,0 +1,22 @@
state:
target:
entity:
domain: light
fields:
state:
required: true
default: "on"
selector:
select:
options:
- "off"
- "on"
behavior:
required: true
default: any
selector:
select:
options:
- first
- last
- any

View File

@@ -28,7 +28,7 @@ rules:
docs-configuration-parameters:
status: done
comment: No options to configure
docs-installation-parameters: todo
docs-installation-parameters: done
entity-unavailable: todo
integration-owner: done
log-when-unavailable: todo

View File

@@ -22,7 +22,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -30,8 +29,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload Local file config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -65,6 +65,7 @@ class LocalFileConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

View File

@@ -1,7 +1,5 @@
set_hold_time:
target:
device:
integration: lyric
entity:
integration: lyric
domain: climate

View File

@@ -9,6 +9,18 @@
"url": "The remote MCP server URL for the SSE endpoint, for example http://example/sse"
}
},
"credentials_choice": {
"title": "Choose how to authenticate with the MCP server",
"description": "You can either use existing credentials from another integration or set up new credentials.",
"menu_options": {
"new_credentials": "Set up new credentials",
"pick_implementation": "Use existing credentials"
},
"menu_option_descriptions": {
"new_credentials": "You will be guided through setting up a new OAuth Client ID and secret.",
"pick_implementation": "You may use previously entered OAuth credentials."
}
},
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
"data": {
@@ -27,14 +39,21 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"missing_capabilities": "The MCP server does not support a required capability (Tools)",
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
"reauth_account_mismatch": "The authenticated user does not match the MCP Server user that needed re-authentication.",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
"unknown": "[%key:common::config_flow::error::unknown%]",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
}
}
}

View File

@@ -7,8 +7,9 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionE
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_PORT, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION
from .utils import create_version
@@ -25,13 +26,21 @@ REAUTH_SCHEMA = vol.Schema(
vol.Required(CONF_API_TOKEN): str,
}
)
DISCOVERY_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_TOKEN): str,
}
)
class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
"""Mealie config flow."""
VERSION = 1
host: str | None = None
verify_ssl: bool = True
_hassio_discovery: dict[str, Any] | None = None
async def check_connection(
self, api_token: str
@@ -143,3 +152,59 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=USER_SCHEMA,
errors=errors,
)
async def async_step_hassio(
self, discovery_info: HassioServiceInfo
) -> ConfigFlowResult:
"""Prepare configuration for a Mealie add-on.
This flow is triggered by the discovery component.
"""
await self._async_handle_discovery_without_unique_id()
self._hassio_discovery = discovery_info.config
return await self.async_step_hassio_confirm()
async def async_step_hassio_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm Supervisor discovery and prompt for API token."""
if user_input is None:
return await self._show_hassio_form()
assert self._hassio_discovery
self.host = (
f"{self._hassio_discovery[CONF_HOST]}:{self._hassio_discovery[CONF_PORT]}"
)
self.verify_ssl = True
errors, user_id = await self.check_connection(
user_input[CONF_API_TOKEN],
)
if not errors:
await self.async_set_unique_id(user_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title="Mealie",
data={
CONF_HOST: self.host,
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
CONF_VERIFY_SSL: self.verify_ssl,
},
)
return await self._show_hassio_form(errors)
async def _show_hassio_form(
self, errors: dict[str, str] | None = None
) -> ConfigFlowResult:
"""Show the Hass.io confirmation form to the user."""
assert self._hassio_discovery
return self.async_show_form(
step_id="hassio_confirm",
data_schema=DISCOVERY_SCHEMA,
description_placeholders={"addon": self._hassio_discovery["addon"]},
errors=errors or {},
)

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["aiomealie==0.10.2"]
"requirements": ["aiomealie==0.11.0"]
}

View File

@@ -39,8 +39,14 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info: todo
discovery: todo
discovery-update-info:
status: exempt
comment: |
This integration will only discover a Mealie addon that is local, not on the network.
discovery:
status: done
comment: |
The integration will discover a Mealie addon posting a discovery message.
docs-data-update: done
docs-examples: done
docs-known-limitations: todo

View File

@@ -39,6 +39,16 @@
"api_token": "[%key:component::mealie::common::data_description_api_token%]",
"verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]"
}
},
"hassio_confirm": {
"title": "Mealie via Home Assistant add-on",
"description": "Do you want to configure Home Assistant to connect to the Mealie instance provided by the add-on: {addon}?",
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]"
},
"data_description": {
"api_token": "[%key:component::mealie::common::data_description_api_token%]"
}
}
},
"error": {
@@ -50,6 +60,7 @@
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "You have to use the same account that was used to configure the integration."

View File

@@ -39,6 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, CONF_INDOOR_HUMIDITY: source_entity_id},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
# We use async_handle_source_entity_changes to track changes to the humidity
@@ -79,6 +80,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry,
options={**entry.options, temp_sensor: data["entity_id"]},
)
hass.config_entries.async_schedule_reload(entry.entry_id)
return async_sensor_updated
@@ -89,7 +91,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.async_on_unload(entry.add_update_listener(update_listener))
return True
@@ -99,11 +100,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Migrate old entry."""

View File

@@ -100,6 +100,7 @@ class MoldIndicatorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True
VERSION = 1
MINOR_VERSION = 2

View File

@@ -1,8 +1,7 @@
set_text_overlay:
target:
device:
integration: motioneye
entity:
domain: camera
integration: motioneye
fields:
left_text:
@@ -48,9 +47,8 @@ set_text_overlay:
action:
target:
device:
integration: motioneye
entity:
domain: camera
integration: motioneye
fields:
action:
@@ -88,7 +86,6 @@ action:
snapshot:
target:
device:
integration: motioneye
entity:
domain: camera
integration: motioneye

View File

@@ -2,10 +2,8 @@
"domain": "mvglive",
"name": "MVG",
"codeowners": [],
"disabled": "This integration is disabled because it uses non-open source code to operate.",
"documentation": "https://www.home-assistant.io/integrations/mvglive",
"iot_class": "cloud_polling",
"loggers": ["MVGLive"],
"quality_scale": "legacy",
"requirements": ["PyMVGLive==1.1.4"]
"loggers": ["MVG"],
"requirements": ["mvg==1.4.0"]
}

View File

@@ -1,13 +1,14 @@
"""Support for departure information for public transport in Munich."""
# mypy: ignore-errors
from __future__ import annotations
from collections.abc import Mapping
from copy import deepcopy
from datetime import timedelta
import logging
from typing import Any
import MVGLive
from mvg import MvgApi, MvgApiError, TransportType
import voluptuous as vol
from homeassistant.components.sensor import (
@@ -19,6 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
@@ -44,53 +46,55 @@ ICONS = {
"SEV": "mdi:checkbox-blank-circle-outline",
"-": "mdi:clock",
}
ATTRIBUTION = "Data provided by MVG-live.de"
ATTRIBUTION = "Data provided by mvg.de"
SCAN_INTERVAL = timedelta(seconds=30)
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NEXT_DEPARTURE): [
{
vol.Required(CONF_STATION): cv.string,
vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
vol.Optional(
CONF_PRODUCTS, default=DEFAULT_PRODUCT
): cv.ensure_list_csv,
vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
}
]
}
PLATFORM_SCHEMA = vol.All(
cv.deprecated(CONF_DIRECTIONS),
SENSOR_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_NEXT_DEPARTURE): [
{
vol.Required(CONF_STATION): cv.string,
vol.Optional(CONF_DESTINATIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_DIRECTIONS, default=[""]): cv.ensure_list_csv,
vol.Optional(CONF_LINES, default=[""]): cv.ensure_list_csv,
vol.Optional(
CONF_PRODUCTS, default=DEFAULT_PRODUCT
): cv.ensure_list_csv,
vol.Optional(CONF_TIMEOFFSET, default=0): cv.positive_int,
vol.Optional(CONF_NUMBER, default=1): cv.positive_int,
vol.Optional(CONF_NAME): cv.string,
}
]
}
),
)
def setup_platform(
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the MVGLive sensor."""
add_entities(
(
MVGLiveSensor(
nextdeparture.get(CONF_STATION),
nextdeparture.get(CONF_DESTINATIONS),
nextdeparture.get(CONF_DIRECTIONS),
nextdeparture.get(CONF_LINES),
nextdeparture.get(CONF_PRODUCTS),
nextdeparture.get(CONF_TIMEOFFSET),
nextdeparture.get(CONF_NUMBER),
nextdeparture.get(CONF_NAME),
)
for nextdeparture in config[CONF_NEXT_DEPARTURE]
),
True,
)
sensors = [
MVGLiveSensor(
hass,
nextdeparture.get(CONF_STATION),
nextdeparture.get(CONF_DESTINATIONS),
nextdeparture.get(CONF_LINES),
nextdeparture.get(CONF_PRODUCTS),
nextdeparture.get(CONF_TIMEOFFSET),
nextdeparture.get(CONF_NUMBER),
nextdeparture.get(CONF_NAME),
)
for nextdeparture in config[CONF_NEXT_DEPARTURE]
]
add_entities(sensors, True)
class MVGLiveSensor(SensorEntity):
@@ -100,38 +104,38 @@ class MVGLiveSensor(SensorEntity):
def __init__(
self,
station,
hass: HomeAssistant,
station_name,
destinations,
directions,
lines,
products,
timeoffset,
number,
name,
):
) -> None:
"""Initialize the sensor."""
self._station = station
self._name = name
self._station_name = station_name
self.data = MVGLiveData(
station, destinations, directions, lines, products, timeoffset, number
hass, station_name, destinations, lines, products, timeoffset, number
)
self._state = None
self._icon = ICONS["-"]
@property
def name(self):
def name(self) -> str | None:
"""Return the name of the sensor."""
if self._name:
return self._name
return self._station
return self._station_name
@property
def native_value(self):
def native_value(self) -> str | None:
"""Return the next departure time."""
return self._state
@property
def extra_state_attributes(self):
def extra_state_attributes(self) -> Mapping[str, Any] | None:
"""Return the state attributes."""
if not (dep := self.data.departures):
return None
@@ -140,88 +144,114 @@ class MVGLiveSensor(SensorEntity):
return attr
@property
def icon(self):
def icon(self) -> str | None:
"""Icon to use in the frontend, if any."""
return self._icon
@property
def native_unit_of_measurement(self):
def native_unit_of_measurement(self) -> str | None:
"""Return the unit this state is expressed in."""
return UnitOfTime.MINUTES
def update(self) -> None:
async def async_update(self) -> None:
"""Get the latest data and update the state."""
self.data.update()
await self.data.update()
if not self.data.departures:
self._state = "-"
self._state = None
self._icon = ICONS["-"]
else:
self._state = self.data.departures[0].get("time", "-")
self._icon = ICONS[self.data.departures[0].get("product", "-")]
self._state = self.data.departures[0].get("time_in_mins", "-")
self._icon = self.data.departures[0].get("icon", ICONS["-"])
def _get_minutes_until_departure(departure_time: int) -> int:
"""Calculate the time difference in minutes between the current time and a given departure time.
Args:
departure_time: Unix timestamp of the departure time, in seconds.
Returns:
The time difference in minutes, as an integer.
"""
current_time = dt_util.utcnow()
departure_datetime = dt_util.utc_from_timestamp(departure_time)
time_difference = (departure_datetime - current_time).total_seconds()
return int(time_difference / 60.0)
class MVGLiveData:
"""Pull data from the mvg-live.de web page."""
"""Pull data from the mvg.de web page."""
def __init__(
self, station, destinations, directions, lines, products, timeoffset, number
):
self,
hass: HomeAssistant,
station_name,
destinations,
lines,
products,
timeoffset,
number,
) -> None:
"""Initialize the sensor."""
self._station = station
self._hass = hass
self._station_name = station_name
self._station_id = None
self._destinations = destinations
self._directions = directions
self._lines = lines
self._products = products
self._timeoffset = timeoffset
self._number = number
self._include_ubahn = "U-Bahn" in self._products
self._include_tram = "Tram" in self._products
self._include_bus = "Bus" in self._products
self._include_sbahn = "S-Bahn" in self._products
self.mvg = MVGLive.MVGLive()
self.departures = []
self.departures: list[dict[str, Any]] = []
def update(self):
async def update(self):
"""Update the connection data."""
if self._station_id is None:
try:
station = await MvgApi.station_async(self._station_name)
self._station_id = station["id"]
except MvgApiError as err:
_LOGGER.error(
"Failed to resolve station %s: %s", self._station_name, err
)
self.departures = []
return
try:
_departures = self.mvg.getlivedata(
station=self._station,
timeoffset=self._timeoffset,
ubahn=self._include_ubahn,
tram=self._include_tram,
bus=self._include_bus,
sbahn=self._include_sbahn,
_departures = await MvgApi.departures_async(
station_id=self._station_id,
offset=self._timeoffset,
limit=self._number,
transport_types=[
transport_type
for transport_type in TransportType
if transport_type.value[0] in self._products
]
if self._products
else None,
)
except ValueError:
self.departures = []
_LOGGER.warning("Returned data not understood")
return
self.departures = []
for i, _departure in enumerate(_departures):
# find the first departure meeting the criteria
for _departure in _departures:
if (
"" not in self._destinations[:1]
and _departure["destination"] not in self._destinations
):
continue
if (
"" not in self._directions[:1]
and _departure["direction"] not in self._directions
):
if "" not in self._lines[:1] and _departure["line"] not in self._lines:
continue
if "" not in self._lines[:1] and _departure["linename"] not in self._lines:
time_to_departure = _get_minutes_until_departure(_departure["time"])
if time_to_departure < self._timeoffset:
continue
if _departure["time"] < self._timeoffset:
continue
# now select the relevant data
_nextdep = {}
for k in ("destination", "linename", "time", "direction", "product"):
for k in ("destination", "line", "type", "cancelled", "icon"):
_nextdep[k] = _departure.get(k, "")
_nextdep["time"] = int(_nextdep["time"])
_nextdep["time_in_mins"] = time_to_departure
self.departures.append(_nextdep)
if i == self._number - 1:
break
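_get_minutes_until_departure above converts the API's Unix departure timestamp into whole minutes from now, which drives both the timeoffset filter and the time_in_mins attribute. A tiny standalone check of the arithmetic with a fixed clock and made-up timestamps:

from datetime import datetime, timedelta, timezone

def minutes_until(departure_ts: int, now: datetime) -> int:
    """Same arithmetic as _get_minutes_until_departure, with an explicit 'now'."""
    departure = datetime.fromtimestamp(departure_ts, tz=timezone.utc)
    return int((departure - now).total_seconds() / 60.0)

now = datetime(2025, 9, 29, 12, 0, tzinfo=timezone.utc)
departure_ts = int((now + timedelta(minutes=7, seconds=30)).timestamp())
print(minutes_until(departure_ts, now))  # 7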

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"iot_class": "local_polling",
"requirements": ["nibe==2.18.0"]
"requirements": ["nibe==2.19.0"]
}

View File

@@ -11,7 +11,7 @@
"_r_to_u": "City/county (R-U)",
"_v_to_z": "City/county (V-Z)",
"slots": "Maximum warnings per city/county",
"headline_filter": "Blacklist regex to filter warning headlines"
"headline_filter": "Headline blocklist"
}
}
},
@@ -34,7 +34,7 @@
"_v_to_z": "[%key:component::nina::config::step::user::data::_v_to_z%]",
"slots": "[%key:component::nina::config::step::user::data::slots%]",
"headline_filter": "[%key:component::nina::config::step::user::data::headline_filter%]",
"area_filter": "Whitelist regex to filter warnings based on affected areas"
"area_filter": "Affected area filter"
}
}
},

View File

@@ -13,6 +13,6 @@ NMAP_TRACKED_DEVICES: Final = "nmap_tracked_devices"
# Interval in minutes to exclude devices from a scan while they are home
CONF_HOME_INTERVAL: Final = "home_interval"
CONF_OPTIONS: Final = "scan_options"
DEFAULT_OPTIONS: Final = "-F -T4 --min-rate 10 --host-timeout 5s"
DEFAULT_OPTIONS: Final = "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s"
TRACKER_SCAN_INTERVAL: Final = 120

View File

@@ -124,7 +124,7 @@ class NumberDeviceClass(StrEnum):
CO = "carbon_monoxide"
"""Carbon Monoxide gas concentration.
Unit of measurement: `ppm` (parts per million), mg/m³
Unit of measurement: `ppm` (parts per million)
"""
CO2 = "carbon_dioxide"
@@ -475,10 +475,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
NumberDeviceClass.BATTERY: {PERCENTAGE},
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
NumberDeviceClass.CO: {
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
NumberDeviceClass.CURRENT: set(UnitOfElectricCurrent),

View File

@@ -52,10 +52,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> boo
try:
info = await async_interview(host)
except TimeoutError as exc:
raise ConfigEntryNotReady(f"Timed out interviewing: {host}") from exc
except OSError as exc:
raise ConfigEntryNotReady(f"Unable to connect to: {host}") from exc
if info is None:
raise ConfigEntryNotReady(f"Unable to connect to: {host}")
raise ConfigEntryNotReady(f"Unexpected exception interviewing: {host}") from exc
manager = ReceiverManager(hass, entry, info)

View File

@@ -109,24 +109,22 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.debug("Config flow manual: %s", host)
try:
info = await async_interview(host)
except TimeoutError:
_LOGGER.warning("Timed out interviewing: %s", host)
errors["base"] = "cannot_connect"
except OSError:
_LOGGER.exception("Unexpected exception")
_LOGGER.exception("Unexpected exception interviewing: %s", host)
errors["base"] = "unknown"
else:
if info is None:
errors["base"] = "cannot_connect"
self._receiver_info = info
await self.async_set_unique_id(info.identifier, raise_on_progress=False)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
else:
self._receiver_info = info
self._abort_if_unique_id_configured()
await self.async_set_unique_id(
info.identifier, raise_on_progress=False
)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()
return await self.async_step_configure_receiver()
return await self.async_step_configure_receiver()
suggested_values = user_input
if suggested_values is None and self.source == SOURCE_RECONFIGURE:
@@ -214,13 +212,12 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN):
try:
info = await async_interview(host)
except OSError:
_LOGGER.exception("Unexpected exception interviewing host %s", host)
return self.async_abort(reason="unknown")
if info is None:
_LOGGER.debug("SSDP eiscp is None: %s", host)
except TimeoutError:
_LOGGER.warning("Timed out interviewing: %s", host)
return self.async_abort(reason="cannot_connect")
except OSError:
_LOGGER.exception("Unexpected exception interviewing: %s", host)
return self.async_abort(reason="unknown")
await self.async_set_unique_id(info.identifier)
self._abort_if_unique_id_configured(updates={CONF_HOST: info.host})

View File

@@ -124,13 +124,10 @@ class ReceiverManager:
self.callbacks.clear()
async def async_interview(host: str) -> ReceiverInfo | None:
async def async_interview(host: str) -> ReceiverInfo:
"""Interview the receiver."""
info: ReceiverInfo | None = None
with contextlib.suppress(asyncio.TimeoutError):
async with asyncio.timeout(DEVICE_INTERVIEW_TIMEOUT):
info = await aioonkyo.interview(host)
return info
async with asyncio.timeout(DEVICE_INTERVIEW_TIMEOUT):
return await aioonkyo.interview(host)
async def async_discover(hass: HomeAssistant) -> Iterable[ReceiverInfo]:

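A self-contained sketch contrasting the old suppress-and-return-None pattern with the new raise-on-timeout pattern that the callers above now handle; _probe and the timeout value are stand-ins, not the integration's aioonkyo call.

import asyncio
import contextlib

DEVICE_INTERVIEW_TIMEOUT = 5  # assumed value for illustration

async def _probe(host: str) -> str:
    """Stand-in for aioonkyo.interview(); sleeps to simulate an unreachable receiver."""
    await asyncio.sleep(3600)
    return f"info for {host}"

async def interview_old(host: str) -> str | None:
    """Old pattern: suppress the timeout and hand the caller None."""
    info: str | None = None
    with contextlib.suppress(asyncio.TimeoutError):
        async with asyncio.timeout(DEVICE_INTERVIEW_TIMEOUT):
            info = await _probe(host)
    return info

async def interview_new(host: str) -> str:
    """New pattern: let TimeoutError propagate so each caller decides how to report it."""
    async with asyncio.timeout(DEVICE_INTERVIEW_TIMEOUT):
        return await _probe(host)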

@@ -5,7 +5,14 @@ from __future__ import annotations
from pyportainer import Portainer
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_HOST, Platform
from homeassistant.const import (
CONF_API_KEY,
CONF_API_TOKEN,
CONF_HOST,
CONF_URL,
CONF_VERIFY_SSL,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession
@@ -19,11 +26,12 @@ type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
"""Set up Portainer from a config entry."""
session = async_create_clientsession(hass)
client = Portainer(
api_url=entry.data[CONF_HOST],
api_key=entry.data[CONF_API_KEY],
session=session,
api_url=entry.data[CONF_URL],
api_key=entry.data[CONF_API_TOKEN],
session=async_create_clientsession(
hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
),
)
coordinator = PortainerCoordinator(hass, entry, client)
@@ -38,3 +46,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
async def async_migrate_entry(hass: HomeAssistant, entry: PortainerConfigEntry) -> bool:
"""Migrate old entry."""
if entry.version < 2:
data = dict(entry.data)
data[CONF_URL] = data.pop(CONF_HOST)
data[CONF_API_TOKEN] = data.pop(CONF_API_KEY)
hass.config_entries.async_update_entry(entry=entry, data=data, version=2)
return True

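A standalone restatement of the key rename the version-2 migration performs, using plain dicts; the literal strings "host", "api_key", "url" and "api_token" are assumed to match CONF_HOST, CONF_API_KEY, CONF_URL and CONF_API_TOKEN from homeassistant.const.

def migrate_v1_to_v2(data: dict[str, object]) -> dict[str, object]:
    """Rename the stored keys the same way async_migrate_entry does above."""
    migrated = dict(data)
    migrated["url"] = migrated.pop("host")  # CONF_HOST -> CONF_URL
    migrated["api_token"] = migrated.pop("api_key")  # CONF_API_KEY -> CONF_API_TOKEN
    return migrated

assert migrate_v1_to_v2({"host": "https://portainer.local:9443", "api_key": "abc"}) == {
    "url": "https://portainer.local:9443",
    "api_token": "abc",
}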

@@ -131,7 +131,15 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
self.entity_description = entity_description
super().__init__(device_info, coordinator, via_device)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_info.id}_{entity_description.key}"
# Container IDs are ephemeral, so use the container name for the unique ID
# The first name should always be unique; it's fine if users have aliases
# According to Docker's API docs, the first name is unique
device_identifier = (
self._device_info.names[0].replace("/", " ").strip()
if self._device_info.names
else None
)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{device_identifier}_{entity_description.key}"
@property
def available(self) -> bool:

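A standalone restatement of the name-derivation expression above, with a hypothetical helper name, showing why the first Docker name gives a stable identifier while the ephemeral container ID does not.

def container_identifier(names: list[str] | None) -> str | None:
    """Derive the identifier used in the unique ID from the first container name."""
    if not names:
        return None
    # Docker reports names with a leading slash, e.g. "/homeassistant".
    return names[0].replace("/", " ").strip()

assert container_identifier(["/homeassistant"]) == "homeassistant"
assert container_identifier(None) is None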

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -14,7 +15,7 @@ from pyportainer import (
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_KEY, CONF_HOST
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -24,8 +25,9 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_API_KEY): str,
vol.Required(CONF_URL): str,
vol.Required(CONF_API_TOKEN): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
}
)
@@ -34,9 +36,11 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect."""
client = Portainer(
api_url=data[CONF_HOST],
api_key=data[CONF_API_KEY],
session=async_get_clientsession(hass),
api_url=data[CONF_URL],
api_key=data[CONF_API_TOKEN],
session=async_get_clientsession(
hass=hass, verify_ssl=data.get(CONF_VERIFY_SSL, True)
),
)
try:
await client.get_endpoints()
@@ -47,19 +51,21 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
except PortainerTimeoutError as err:
raise PortainerTimeout from err
_LOGGER.debug("Connected to Portainer API: %s", data[CONF_HOST])
_LOGGER.debug("Connected to Portainer API: %s", data[CONF_URL])
class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Portainer."""
VERSION = 2
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
@@ -72,16 +78,58 @@ class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_API_KEY])
await self.async_set_unique_id(user_input[CONF_API_TOKEN])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_HOST], data=user_input
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth when Portainer API authentication fails."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth: ask for new API token and validate."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
try:
await _validate_input(
self.hass,
data={
**reauth_entry.data,
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except PortainerTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""

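A small sketch, with a hypothetical helper name, of the data merge the reauth_confirm step performs before re-validating: keep the stored URL and SSL setting, replace only the token. The literal "api_token" and "verify_ssl" keys mirror the homeassistant.const constants used above.

def merged_reauth_input(stored: dict[str, object], new_token: str) -> dict[str, object]:
    """Combine the stored entry data with the freshly entered token."""
    return {**stored, "api_token": new_token}

reauth_data = merged_reauth_input(
    {"url": "https://portainer.local:9443", "api_token": "expired", "verify_ssl": False},
    "fresh-token",
)
assert reauth_data["api_token"] == "fresh-token"
assert reauth_data["verify_ssl"] is False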

@@ -16,9 +16,9 @@ from pyportainer.models.docker import DockerContainer
from pyportainer.models.portainer import Endpoint
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
@@ -66,7 +66,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
try:
await self.portainer.get_endpoints()
except PortainerAuthenticationError as err:
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
@@ -87,14 +87,14 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
async def _async_update_data(self) -> dict[int, PortainerCoordinatorData]:
"""Fetch data from Portainer API."""
_LOGGER.debug(
"Fetching data from Portainer API: %s", self.config_entry.data[CONF_HOST]
"Fetching data from Portainer API: %s", self.config_entry.data[CONF_URL]
)
try:
endpoints = await self.portainer.get_endpoints()
except PortainerAuthenticationError as err:
_LOGGER.error("Authentication error: %s", repr(err))
raise UpdateFailed(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
@@ -121,7 +121,7 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
) from err
except PortainerAuthenticationError as err:
_LOGGER.exception("Authentication error")
raise UpdateFailed(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},

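The point of swapping UpdateFailed and ConfigEntryError for ConfigEntryAuthFailed in the coordinator: ConfigEntryAuthFailed tells Home Assistant to start a reauth flow for the entry (which the new reauth step handles), whereas UpdateFailed only marks entities unavailable and retries later. A hypothetical helper sketching that decision, assuming the same imports the coordinator already uses:

from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import UpdateFailed
from pyportainer import PortainerAuthenticationError

def to_coordinator_error(err: Exception) -> Exception:
    """Pick the exception a refresh should raise for a given client error."""
    if isinstance(err, PortainerAuthenticationError):
        # Triggers the config entry reauth flow instead of endless failed refreshes.
        return ConfigEntryAuthFailed(repr(err))
    # Transient failures: entities become unavailable and the coordinator retries.
    return UpdateFailed(repr(err))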

@@ -60,7 +60,7 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
self._attr_device_info = DeviceInfo(
identifiers={
(DOMAIN, f"{self.coordinator.config_entry.entry_id}_{self.device_id}")
(DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
},
manufacturer=DEFAULT_NAME,
model="Container",

Some files were not shown because too many files have changed in this diff.