Compare commits

...

83 Commits

Author SHA1 Message Date
Erik 918185fda4 Extend script continue_on_error suppression when calling actions 2026-04-21 10:01:08 +02:00
Ronald van der Meer 89fe56c599 Add reconfiguration flow to Duco integration (#168652) 2026-04-21 07:46:50 +02:00
Rene Nulsch 2fb1ed443a Validate directory_path and file_name in telegram_bot.download_file (#168656) 2026-04-21 07:46:43 +02:00
Glenn Vandeuren (aka Iondependent) ea8f82e9ba Bump nhc to 0.8.0 (#168651)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: VandeurenGlenn <8685280+VandeurenGlenn@users.noreply.github.com>
2026-04-20 22:09:19 +01:00
puddly 31dc02c3ee Bump universal-silabs-flasher to 1.1.0 (#168647) 2026-04-20 23:02:53 +02:00
Nils Ove Erstad 70ec6fa654 Fix MQTT JSON light restoring None color_mode on startup (#168608)
Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>
2026-04-20 21:59:03 +02:00
puddly c2946404ea Bump ZHA to 1.2.1 (#168644) 2026-04-20 15:42:04 -04:00
Abílio Costa f715bcd7c1 Change Claude gh review agent back to skill (#168642) 2026-04-20 20:59:20 +02:00
Manu 0c0e61e133 Remove hunterjm from Xbox integration codeowners (#167024) 2026-04-20 20:58:43 +02:00
Tomer 305761e7de Victron GX: device_tracker platfrom (#168462)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-20 20:54:58 +02:00
puddly 3b81f09765 Bump serialx to 1.4.1 (#168640) 2026-04-20 20:53:51 +02:00
epenet a2cc7d0fca Use runtime_data in watttime integration (#168630) 2026-04-20 20:46:41 +02:00
Ronald van der Meer 038b56e5eb Claim Silver quality scale for Duco integration (#168620) 2026-04-20 19:45:57 +01:00
Franck Nijhof 0edcb8d60f Set parallel updates for PVOutput sensor platform (#168643) 2026-04-20 20:45:11 +02:00
Stefan Agner cc8000ed89 Remove hassio-main panel registration (#168626)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-20 19:42:10 +02:00
Mick Vleeshouwer a92dcaaf5f Add first cover entity tests to Overkiz (#165670)
Co-authored-by: Copilot <copilot@github.com>
2026-04-20 18:30:26 +01:00
Joakim Plate e889541d2e Correct state/device class for water in gardena (#168637) 2026-04-20 19:02:29 +02:00
Michael 85e9d3c6a8 Migrate Z-Wave.Me to use runtime_data (#168562) 2026-04-20 18:29:46 +02:00
Robert Resch fe9db39684 Add docker syntax to all Docker files (#168350) 2026-04-20 17:31:04 +02:00
Assaf Akrabi 253d3e1758 Migrate lib to aiorussound for Russound RNET (#168484) 2026-04-20 17:21:45 +02:00
Raphael Hehl dcb5f0d533 Improve UniFi config flow quality scale: config-flow and config-flow-test-coverage (#168477)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-04-20 17:16:51 +02:00
epenet d5e4be317c Use runtime_data in wolflink integration (#168625) 2026-04-20 17:09:49 +02:00
albaintor 0ebf4d86f5 Fixed Kodi Media Browsing (#165819) 2026-04-20 17:09:03 +02:00
Klaas Schoute 1a86913239 Merge config flows for powerfox integration (#164019) 2026-04-20 17:07:45 +02:00
Max R f2c010aaaf feat(citybikes): add number of ebikes attribute (#166229) 2026-04-20 17:02:24 +02:00
Erik Montnemery 74de32377e Improve async_get_system_info tests (#168586)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-20 16:47:40 +02:00
Franck Nijhof 901925ad54 Add Fumis pellet stove integration (#168515) 2026-04-20 16:25:12 +02:00
Øyvind Matheson Wergeland defbfe17a3 Fix nobo_hub via_device warning (#168595) 2026-04-20 16:25:05 +02:00
Merlin Schumacher 9795f55af3 Remove reference to deprecated state STANDBY from universal media player (#160930)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2026-04-20 16:10:34 +02:00
johanzander 967c5d2092 Fix KeyError in Growatt server login response handling (#168482)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-20 15:41:38 +02:00
Paulus Schoutsen cdecff9380 Use dedicated power commands for LG infrared (#168488)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: balloob <1444314+balloob@users.noreply.github.com>
2026-04-20 15:36:00 +02:00
Robert Resch 59ceb7c58c Revert "Update PyTurboJPEG to 2.2.0" (#168617) 2026-04-20 15:28:17 +02:00
Kurt Chrisford d66b9f4316 Bump actron-neo-api to 0.5.3 (#167732) 2026-04-20 14:58:43 +02:00
Christophe Gagnier 40477ff87b Bump python-technove to 2.1.1 (#168403)
Co-authored-by: Moustachauve <2206577+Moustachauve@users.noreply.github.com>
2026-04-20 14:52:07 +02:00
epenet d96b626497 Use runtime_data in vallox integration (#168604)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 14:41:53 +02:00
epenet 0c294b342c Use runtime_data in verisure integration (#168605)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 14:39:53 +02:00
Marc Mueller 1f64ca4a8d Update pydantic to 2.13.2 (#168601)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-04-20 14:36:07 +02:00
epenet 79ae0e6c49 Use runtime_data in venstar integration (#168613) 2026-04-20 14:32:13 +02:00
epenet dc0052552a Use runtime_data in vera integration (#168614) 2026-04-20 14:31:22 +02:00
epenet 77f4baa79e Use runtime_data in volumio integration (#168616) 2026-04-20 14:30:34 +02:00
Matthias Alphart 52377b958b Update knx-frontend to 2026.4.19.175239 (#168568) 2026-04-20 14:28:46 +02:00
Denis Shulyaka 09105693c7 Filter OpenAI schema (#168543) 2026-04-20 14:28:13 +02:00
epenet db838f67d7 Move vallox service registration to services.py (#168612)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-20 14:26:57 +02:00
renovate[bot] 720fd6d802 Update ruff (#168240)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: edenhaus <26537646+edenhaus@users.noreply.github.com>
2026-04-20 14:25:43 +02:00
epenet b43d6a70da Fix cookie file suppression in verisure (#168609) 2026-04-20 13:41:41 +02:00
Erik Montnemery b5caabcbae Fix quantum_gateway tests (#168610) 2026-04-20 13:37:37 +02:00
Raphael Hehl 9bb46494d3 unifi: implement parallel-updates quality scale rule (#168563) 2026-04-20 13:26:17 +02:00
Erik Montnemery ca066b94c5 Deprecate legacy device tracker (#168387) 2026-04-20 13:20:36 +02:00
Erik Montnemery 8de6fa63cd Allow passing a set of event types to logbook.async_subscribe_events (#168163) 2026-04-20 13:19:31 +02:00
Erik Montnemery 866f41791a Deprecate support for local installation of dependencies (#168164) 2026-04-20 13:19:13 +02:00
epenet 5b3d2f823f Always load all platforms in sfr_box (#168594)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 13:05:51 +02:00
Ariel Ebersberger e1d38fa237 Fix flaky airtouch5 test for Python 3.14.3 (#168366) 2026-04-20 12:51:18 +02:00
epenet 8eef269ce3 Use runtime_data in upb integration (#168600)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 12:50:22 +02:00
David Bonnes 8afee640ef Remove device ids from extra_state_attrs of Evohome's Button entities (#168517)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-20 12:17:09 +02:00
Maciej Bieniek 10fd51b34d Add ice phenomena sensor to IMGW-PIB integration (#168548) 2026-04-20 12:07:26 +02:00
Copilot a1cde0308a Clarify Copilot review guidance for validated entity action inputs (#168449)
Co-authored-by: balloob <1444314+balloob@users.noreply.github.com>
2026-04-20 11:56:21 +02:00
Erik Montnemery 5c14025e70 Sort keys in dict returned by async_get_system_info (#168585) 2026-04-20 11:31:03 +02:00
epenet 7e5762dcee Use runtime_data in ukraine_alarm integration (#168597)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 11:09:10 +02:00
Franck Nijhof c425b69373 Set parallel updates for Tailscale platforms (#168596) 2026-04-20 11:09:06 +02:00
Thijs W. f73ee29ffb Add seek support to frontier_silicon (#168483) 2026-04-20 11:05:42 +02:00
Erik Montnemery db9c5a6df4 Adjust repair text about unsupported installation method (#168156)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-20 11:03:48 +02:00
J. Nick Koston 00d16864e3 Bump habluetooth to 6.1.0 (#168576) 2026-04-20 04:01:16 -05:00
Erik Montnemery 2fb22e5654 Use hass_tmp_config_dir fixture in device_tracker tests (#168582) 2026-04-20 10:58:23 +02:00
epenet 65e09c3213 Use runtime_data in toon integration (#168591)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 10:51:13 +02:00
SeifEddineMezned 86eece57c8 Fix grammar and clarity in strings.json (#168577) 2026-04-20 10:47:37 +02:00
trevorvey e449e28ff5 Updated H590 input source mapping (#168523) 2026-04-20 10:41:49 +02:00
TheJulianJES 6e5b72ea87 Bump matter-python-client to 0.6.0 (#168312) 2026-04-20 10:39:38 +02:00
dependabot[bot] 450aa6d73b Bump actions/cache from 5.0.4 to 5.0.5 (#168583)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-20 10:39:15 +02:00
dependabot[bot] 953fda87c8 Bump j178/prek-action from 2.0.1 to 2.0.2 (#168584)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-20 10:39:01 +02:00
epenet 4b38b79ac5 Use runtime_data in todoist integration (#168590)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 10:37:58 +02:00
epenet 7acc412902 Use runtime_data in thethingsnetwork integration (#168589)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 10:28:36 +02:00
Nick Berardi bf2364e4cb Add First Alert app selection to Lyric auth (#168427)
Co-authored-by: Erwin Douna <e.douna@gmail.com>
2026-04-20 10:20:34 +02:00
Amit Finkelstein 2a6fba3990 Bump hdate to 1.2.1 (#168538) 2026-04-20 10:10:21 +02:00
epenet 6a8220a9df Use runtime_data in tami4 integration (#168587)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 09:56:18 +02:00
Øyvind Matheson Wergeland b005fb236f Improve nobo_hub config entry setup (#168550)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2026-04-20 09:08:49 +02:00
renovate[bot] 528f7625f4 Update zizmor (#168581) 2026-04-20 08:34:51 +02:00
Franck Nijhof 0358696028 Update tailscale to 0.7.0 (#168544)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-20 08:31:26 +02:00
Franck Nijhof ca4b4de20e Migrate Tailscale to use runtime_data (#168556) 2026-04-20 08:30:53 +02:00
Ronald van der Meer 34530810db Enable strict typing for Duco integration (#168572) 2026-04-20 08:30:25 +02:00
Michael c201275fef Migrate Zeversolar to use runtime_data (#168574) 2026-04-20 08:29:45 +02:00
Tomer ef2fa67c36 Victron GX: dedupe strings.json (#168460) 2026-04-20 08:26:43 +02:00
Fabian Munkes 0af4dfb7fd Add initial support for PlayerOptions: Select entities to Music Assistant (#167974)
Co-authored-by: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com>
2026-04-20 05:14:21 +02:00
Thomas55555 894b3bd6a4 Add suggested uom to mop_drying_remaining_time in roborock (#168516) 2026-04-20 03:13:14 +02:00
307 changed files with 48727 additions and 2111 deletions
@@ -1,7 +1,6 @@
---
name: github-pr-reviewer
description: Reviews GitHub pull requests and provides feedback comments.
disallowedTools: Write, Edit
description: Reviews GitHub pull requests and provides feedback comments. This is the top skill to use for reviewing Pull Requests from GitHub.
---
# Review GitHub Pull Request
+2
View File
@@ -12,6 +12,8 @@ description: Everything you need to know to build, test and review Home Assistan
- When looking for examples, prefer integrations with the platinum or gold quality scale level first.
- Polling intervals are NOT user-configurable. Never add scan_interval, update_interval, or polling frequency options to config flows or config entries.
- Do NOT allow users to set config entry names in config flows. Names are automatically generated or can be customized later in UI. Exception: helper integrations may allow custom names.
- For entity actions and entity services, avoid requesting redundant defensive checks for fields already enforced by Home Assistant validation schemas and entity filters; only request extra guards when values bypass validation or are transformed unsafely.
- When validation guarantees a key is present, prefer direct dictionary indexing (`data["key"]`) over `.get("key")` so invalid assumptions fail fast.
The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
+3
View File
@@ -32,6 +32,9 @@ Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) ov
Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.
When reviewing entity actions, do not suggest extra defensive checks for input fields that are already validated by Home Assistant's service/action schemas and entity selection filters. Suggest additional guards only when data bypasses those validators or is transformed into a less-safe form.
When validation guarantees a dict key exists, prefer direct key access (`data["key"]`) instead of `.get("key")` so contract violations are surfaced instead of silently masked.
# Skills
+11
View File
@@ -6,6 +6,7 @@
"pep621",
"pip_requirements",
"pre-commit",
"regex",
"homeassistant-manifest"
],
@@ -26,6 +27,16 @@
]
},
"regexManagers": [
{
"description": "Update ruff required-version in pyproject.toml",
"managerFilePatterns": ["/^pyproject\\.toml$/"],
"matchStrings": ["required-version = \">=(?<currentValue>[\\d.]+)\""],
"depNameTemplate": "ruff",
"datasourceTemplate": "pypi"
}
],
"minimumReleaseAge": "7 days",
"prConcurrentLimit": 10,
"prHourlyLimit": 2,
+24 -24
View File
@@ -282,7 +282,7 @@ jobs:
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
echo "::add-matcher::.github/workflows/matchers/codespell.json"
- name: Run prek
uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
env:
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
RUFF_OUTPUT_FORMAT: github
@@ -303,7 +303,7 @@ jobs:
with:
persist-credentials: false
- name: Run zizmor
uses: j178/prek-action@53276d8b0d10f8b6672aa85b4588c6921d0370cc # v2.0.1
uses: j178/prek-action@cbc2f23eb5539cf20d82d1aabd0d0ecbcc56f4e3 # v2.0.2
with:
extra-args: --all-files zizmor
@@ -366,7 +366,7 @@ jobs:
echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
key: >-
@@ -374,7 +374,7 @@ jobs:
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -386,7 +386,7 @@ jobs:
env.HA_SHORT_VERSION }}-
- name: Check if apt cache exists
id: cache-apt-check
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
path: |
@@ -432,7 +432,7 @@ jobs:
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -486,7 +486,7 @@ jobs:
&& github.event.inputs.audit-licenses-only != 'true'
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -517,7 +517,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -554,7 +554,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -645,7 +645,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -696,7 +696,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -749,7 +749,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -806,7 +806,7 @@ jobs:
echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -814,7 +814,7 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: .mypy_cache
key: >-
@@ -856,7 +856,7 @@ jobs:
- base
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -889,7 +889,7 @@ jobs:
check-latest: true
- name: Restore full Python virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -932,7 +932,7 @@ jobs:
group: ${{ fromJson(needs.info.outputs.test_groups) }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -966,7 +966,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -1084,7 +1084,7 @@ jobs:
mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1119,7 +1119,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -1242,7 +1242,7 @@ jobs:
postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1279,7 +1279,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
@@ -1425,7 +1425,7 @@ jobs:
group: ${{ fromJson(needs.info.outputs.test_groups) }}
steps:
- name: Restore apt cache
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: |
${{ env.APT_CACHE_DIR }}
@@ -1459,7 +1459,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: venv
fail-on-cache-miss: true
+2 -2
View File
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.1
rev: v0.15.10
hooks:
- id: ruff-check
args:
@@ -18,7 +18,7 @@ repos:
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.23.1
rev: v1.24.0
hooks:
- id: zizmor
args:
+3
View File
@@ -46,6 +46,7 @@ homeassistant.components.accuweather.*
homeassistant.components.acer_projector.*
homeassistant.components.acmeda.*
homeassistant.components.actiontec.*
homeassistant.components.actron_air.*
homeassistant.components.adax.*
homeassistant.components.adguard.*
homeassistant.components.aftership.*
@@ -178,6 +179,7 @@ homeassistant.components.dropbox.*
homeassistant.components.droplet.*
homeassistant.components.dsmr.*
homeassistant.components.duckdns.*
homeassistant.components.duco.*
homeassistant.components.dunehd.*
homeassistant.components.duotecno.*
homeassistant.components.easyenergy.*
@@ -222,6 +224,7 @@ homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fujitsu_fglair.*
homeassistant.components.fully_kiosk.*
homeassistant.components.fumis.*
homeassistant.components.fyta.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
+3
View File
@@ -22,3 +22,6 @@ Prefer concrete types (for example, `HomeAssistant`, `MockConfigEntry`, etc.) ov
## Good practices
Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.
When reviewing entity actions, do not suggest extra defensive checks for input fields that are already validated by Home Assistant's service/action schemas and entity selection filters. Suggest additional guards only when data bypasses those validators or is transformed into a less-safe form.
When validation guarantees a dict key exists, prefer direct key access (`data["key"]`) instead of `.get("key")` so contract violations are surfaced instead of silently masked.
Generated
+4 -2
View File
@@ -592,6 +592,8 @@ CLAUDE.md @home-assistant/core
/tests/components/fujitsu_fglair/ @crevetor
/homeassistant/components/fully_kiosk/ @cgarwood
/tests/components/fully_kiosk/ @cgarwood
/homeassistant/components/fumis/ @frenck
/tests/components/fumis/ @frenck
/homeassistant/components/fyta/ @dontinelli
/tests/components/fyta/ @dontinelli
/homeassistant/components/garage_door/ @home-assistant/core
@@ -1989,8 +1991,8 @@ CLAUDE.md @home-assistant/core
/tests/components/wsdot/ @ucodery
/homeassistant/components/wyoming/ @synesthesiam
/tests/components/wyoming/ @synesthesiam
/homeassistant/components/xbox/ @hunterjm @tr4nt0r
/tests/components/xbox/ @hunterjm @tr4nt0r
/homeassistant/components/xbox/ @tr4nt0r
/tests/components/xbox/ @tr4nt0r
/homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
/tests/components/xiaomi_aqara/ @danielhiversen @syssi
/homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
Generated
+1
View File
@@ -1,3 +1,4 @@
# syntax=docker/dockerfile@sha256:2780b5c3bab67f1f76c781860de469442999ed1a0d7992a5efdf2cffc0e3d769
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
+1
View File
@@ -1,3 +1,4 @@
# syntax=docker/dockerfile@sha256:2780b5c3bab67f1f76c781860de469442999ed1a0d7992a5efdf2cffc0e3d769
FROM mcr.microsoft.com/vscode/devcontainers/base:debian
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["serialx==1.2.2"]
"requirements": ["serialx==1.4.1"]
}
+16 -5
View File
@@ -15,8 +15,10 @@ from homeassistant.components.climate import (
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity, actron_air_command
@@ -139,20 +141,24 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
@actron_air_command
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set a new fan mode."""
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR[fan_mode]
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)
@actron_air_command
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR[hvac_mode]
await self._status.ac_system.set_system_mode(ac_mode)
@actron_air_command
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
await self._status.user_aircon_settings.set_temperature(temperature=temp)
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="temperature_missing",
)
await self._status.user_aircon_settings.set_temperature(temperature=temperature)
class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
@@ -221,4 +227,9 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
@actron_air_command
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="temperature_missing",
)
await self._zone.set_temperature(temperature=temperature)
@@ -23,7 +23,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
self._user_code: str = ""
self._verification_uri: str = ""
self._expires_minutes: str = "30"
self.login_task: asyncio.Task | None = None
self.login_task: asyncio.Task[None] | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -94,7 +94,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error("Error getting user info: %s", err)
return self.async_abort(reason="oauth2_error")
unique_id = str(user_data["id"])
unique_id = user_data.sub
await self.async_set_unique_id(unique_id)
# Check if this is a reauth flow
@@ -107,7 +107,7 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_data["email"],
title=user_data.email,
data={CONF_API_TOKEN: self._api.refresh_token_value},
)
@@ -78,7 +78,14 @@ class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirStatus]):
translation_placeholders={"error": repr(err)},
) from err
self.status = self.api.state_manager.get_status(self.serial_number)
status = self.api.state_manager.get_status(self.serial_number)
if status is None:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={"error": "Status not available"},
)
self.status = status
self.last_seen = dt_util.utcnow()
return self.status
@@ -24,7 +24,7 @@ def actron_air_command[_EntityT: ActronAirEntity, **_P](
"""
@wraps(func)
async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
async def wrapper(self: _EntityT, /, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap API calls with exception handling."""
try:
await func(self, *args, **kwargs)
@@ -13,5 +13,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["actron-neo-api==0.5.0"]
"requirements": ["actron-neo-api==0.5.3"]
}
@@ -69,4 +69,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: todo
strict-typing: todo
strict-typing: done
@@ -58,6 +58,9 @@
"setup_connection_error": {
"message": "Failed to connect to the Actron Air API"
},
"temperature_missing": {
"message": "Provide a temperature value when adjusting the climate entity."
},
"update_error": {
"message": "An error occurred while retrieving data from the Actron Air API: {error}"
}
@@ -34,7 +34,7 @@ def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
def get_serial_number_from_jid(jid: str) -> str:
"""Get serial number from Beolink JID."""
return jid.split(".")[2].split("@")[0]
return jid.split(".")[2].split("@", maxsplit=1)[0]
async def get_remotes(client: MozartClient) -> list[PairedRemote]:
@@ -21,6 +21,6 @@
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==4.0.4",
"habluetooth==5.11.1"
"habluetooth==6.1.0"
]
}
@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/camera",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["PyTurboJPEG==2.2.0"]
"requirements": ["PyTurboJPEG==1.8.3"]
}
@@ -50,7 +50,9 @@ ATTR_UID = "uid"
ATTR_LATITUDE = "latitude"
ATTR_LONGITUDE = "longitude"
ATTR_EMPTY_SLOTS = "empty_slots"
ATTR_FREE_EBIKES = "free_ebikes"
ATTR_TIMESTAMP = "timestamp"
EXTRA_EBIKES = "ebikes"
CONF_NETWORK = "network"
CONF_STATIONS_LIST = "stations"
@@ -238,5 +240,6 @@ class CityBikesStation(SensorEntity):
ATTR_LATITUDE: station.latitude,
ATTR_LONGITUDE: station.longitude,
ATTR_EMPTY_SLOTS: station.empty_slots,
ATTR_FREE_EBIKES: station.extra.get(EXTRA_EBIKES),
ATTR_TIMESTAMP: station.timestamp,
}
@@ -6,6 +6,7 @@ import asyncio
from collections.abc import Callable, Coroutine, Sequence
from datetime import datetime, timedelta
import hashlib
import logging
from types import ModuleType
from typing import Any, Final, Protocol, final
@@ -82,6 +83,8 @@ from .const import (
SourceType,
)
_LOGGER = logging.getLogger(__name__)
SERVICE_SEE: Final = "see"
SOURCE_TYPES = [cls.value for cls in SourceType]
@@ -128,6 +131,8 @@ SERVICE_SEE_PAYLOAD_SCHEMA: Final[vol.Schema] = vol.Schema(
YAML_DEVICES: Final = "known_devices.yaml"
EVENT_NEW_DEVICE: Final = "device_tracker_new_device"
DATA_LEGACY_TRACKERS: Final = "device_tracker.legacy_trackers"
class SeeCallback(Protocol):
"""Protocol type for DeviceTracker.see callback."""
@@ -243,8 +248,19 @@ async def _async_setup_integration(
tracker = await get_tracker(hass, config)
tracker_future.set_result(tracker)
warned_called_see = False
async def async_see_service(call: ServiceCall) -> None:
"""Service to see a device."""
nonlocal warned_called_see
if not warned_called_see:
_LOGGER.warning(
"The %s.%s action is deprecated and will be removed in "
"Home Assistant Core 2027.5",
DOMAIN,
SERVICE_SEE,
)
warned_called_see = True
# Temp workaround for iOS, introduced in 0.65
data = dict(call.data)
data.pop("hostname", None)
@@ -327,6 +343,18 @@ class DeviceTrackerPlatform:
try:
scanner = None
setup: bool | None = None
legacy_trackers = hass.data.setdefault(DATA_LEGACY_TRACKERS, set())
if full_name not in legacy_trackers:
legacy_trackers.add(full_name)
_LOGGER.warning(
"The legacy device tracker platform %s is being set up; legacy "
"device trackers are deprecated and will be removed in Home "
"Assistant Core 2027.5, please migrate to an integration which "
"uses a modern config entry based device tracker",
full_name,
)
if hasattr(self.platform, "async_get_scanner"):
scanner = await self.platform.async_get_scanner(
hass, {DOMAIN: self.config}
@@ -72,6 +72,38 @@ class DucoConfigFlow(ConfigFlow, domain=DOMAIN):
description_placeholders={"name": self._box_name},
)
async def async_step_reconfigure(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Handle reconfiguration of the integration.

    Re-validates the (possibly changed) host and updates the existing
    config entry in place. The flow aborts with a mismatch error if the
    entered host resolves to a different Duco box (different MAC) than
    the one this entry was originally set up for.
    """
    errors: dict[str, str] = {}
    reconfigure_entry = self._get_reconfigure_entry()
    if user_input is not None:
        try:
            box_name, mac = await self._validate_input(user_input[CONF_HOST])
        except DucoConnectionError:
            errors["base"] = "cannot_connect"
        except DucoError:
            # Unexpected library error: log the traceback, show generic error.
            _LOGGER.exception("Unexpected error connecting to Duco box")
            errors["base"] = "unknown"
        else:
            # Refuse to silently repoint this entry at a different device.
            await self.async_set_unique_id(format_mac(mac))
            self._abort_if_unique_id_mismatch()
            return self.async_update_reload_and_abort(
                reconfigure_entry,
                title=box_name,
                data_updates={CONF_HOST: user_input[CONF_HOST]},
            )
    # Show the form pre-filled with the entry's current configuration.
    return self.async_show_form(
        step_id="reconfigure",
        data_schema=self.add_suggested_values_to_schema(
            STEP_USER_SCHEMA, reconfigure_entry.data
        ),
        errors=errors,
    )
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
+1 -1
View File
@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["duco"],
"quality_scale": "bronze",
"quality_scale": "silver",
"requirements": ["python-duco-client==0.3.2"],
"zeroconf": [
{
@@ -66,8 +66,14 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
repair-issues: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: >-
The integration has no actionable repair scenarios. Connection failures are
handled by the coordinator (unavailable entities) and resolve automatically.
There are no credentials to expire and no versioned API to become
incompatible with.
stale-devices:
status: todo
comment: >-
@@ -76,4 +82,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
strict-typing: done
@@ -4,6 +4,8 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unique_id_mismatch": "The device you entered belongs to a different Duco box.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
@@ -14,6 +16,14 @@
"discovery_confirm": {
"description": "Do you want to set up {name}?"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::duco::config::step::user::data_description::host%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
+15 -16
View File
@@ -9,10 +9,11 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import EVOHOME_DATA
from .coordinator import EvoDataUpdateCoordinator
from .entity import EvoEntity, is_valid_zone, unique_zone_id
from .entity import is_valid_zone, unique_zone_id
async def async_setup_platform(
@@ -40,15 +41,22 @@ async def async_setup_platform(
async_add_entities(entities)
for entity in entities:
await entity.update_attrs()
class EvoResetButtonBase(EvoEntity, ButtonEntity):
"""Base for reset button entities."""
class EvoResetButtonBase(CoordinatorEntity[EvoDataUpdateCoordinator], ButtonEntity):
"""Base for Evohome's Button entities."""
_attr_entity_category = EntityCategory.CONFIG
_evo_state_attr_names = ()
_evo_device: evo.ControlSystem | evo.HotWater | evo.Zone
def __init__(
self,
coordinator: EvoDataUpdateCoordinator,
evo_device: evo.ControlSystem | evo.HotWater | evo.Zone,
) -> None:
"""Initialize an Evohome reset button entity."""
super().__init__(coordinator, context=evo_device.id)
self._evo_device = evo_device
async def async_press(self) -> None:
"""Reset the Evohome entity to its base operating mode."""
@@ -58,10 +66,7 @@ class EvoResetButtonBase(EvoEntity, ButtonEntity):
class EvoResetSystemButton(EvoResetButtonBase):
"""Button entity for system reset."""
_attr_translation_key = "reset_system_mode"
_evo_device: evo.ControlSystem
_evo_id_attr = "system_id"
def __init__(
self,
@@ -78,10 +83,7 @@ class EvoResetSystemButton(EvoResetButtonBase):
class EvoResetDhwButton(EvoResetButtonBase):
"""Button entity for DHW override reset."""
_attr_translation_key = "clear_dhw_override"
_evo_device: evo.HotWater
_evo_id_attr = "dhw_id"
def __init__(
self,
@@ -98,10 +100,7 @@ class EvoResetDhwButton(EvoResetButtonBase):
class EvoResetZoneButton(EvoResetButtonBase):
"""Button entity for zone override reset."""
_attr_translation_key = "clear_zone_override"
_evo_device: evo.Zone
_evo_id_attr = "zone_id"
def __init__(
self,
+1 -5
View File
@@ -40,11 +40,7 @@ def unique_zone_id(evo_device: evo.Zone) -> str:
class EvoEntity(CoordinatorEntity[EvoDataUpdateCoordinator]):
"""Base for any evohome-compatible entity (controller, DHW, zone).
This includes the controller, (1 to 12) heating zones and (optionally) a
DHW controller.
"""
"""Base for Evohome's Climate & WaterHeater entities."""
_evo_device: evo.ControlSystem | evo.HotWater | evo.Zone
_evo_id_attr: str
@@ -198,7 +198,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return other_flow._host == self._host # noqa: SLF001
return other_flow._host == self._host
async def async_step_confirm(
self, user_input: dict[str, Any] | None = None
@@ -148,7 +148,7 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN):
def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return other_flow._host == self._host # noqa: SLF001
return other_flow._host == self._host
async def async_step_confirm(
self, user_input: dict[str, Any] | None = None
@@ -26,6 +26,7 @@ from homeassistant.components.media_player import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from . import FrontierSiliconConfigEntry
from .browse_media import browse_node, browse_top_level
@@ -118,7 +119,8 @@ class AFSAPIDevice(MediaPlayerEntity):
features |= MediaPlayerEntityFeature.REPEAT_SET
if self.__play_caps & PlayCaps.SHUFFLE:
features |= MediaPlayerEntityFeature.SHUFFLE_SET
if self.__play_caps & PlayCaps.SEEK:
features |= MediaPlayerEntityFeature.SEEK
if self._supports_sound_mode:
features |= MediaPlayerEntityFeature.SELECT_SOUND_MODE
@@ -223,6 +225,21 @@ class AFSAPIDevice(MediaPlayerEntity):
self._attr_is_volume_muted = await afsapi.get_mute()
self._attr_media_image_url = await afsapi.get_play_graphic()
if self.__play_caps and self.__play_caps & PlayCaps.SEEK:
position_ms = await afsapi.get_play_position()
duration_ms = await afsapi.get_play_duration()
self._attr_media_position = (
position_ms // 1000 if position_ms is not None else None
)
self._attr_media_duration = (
duration_ms // 1000 if duration_ms is not None else None
)
self._attr_media_position_updated_at = dt_util.utcnow()
else:
self._attr_media_position = None
self._attr_media_duration = None
self._attr_media_position_updated_at = None
if self._supports_sound_mode:
try:
eq_preset = await afsapi.get_eq_preset()
@@ -247,6 +264,9 @@ class AFSAPIDevice(MediaPlayerEntity):
self._attr_is_volume_muted = None
self._attr_media_image_url = None
self._attr_sound_mode = None
self._attr_media_position = None
self._attr_media_duration = None
self._attr_media_position_updated_at = None
self._attr_volume_level = None
@@ -334,6 +354,10 @@ class AFSAPIDevice(MediaPlayerEntity):
"""Set shuffle mode."""
await self.fs_device.set_play_shuffle(shuffle)
async def async_media_seek(self, position: float) -> None:
"""Seek to a position in seconds."""
await self.fs_device.set_play_position(int(position * 1000))
async def async_browse_media(
self,
media_content_type: MediaType | str | None = None,
@@ -0,0 +1,26 @@
"""Support for Fumis pellet stoves."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import FumisConfigEntry, FumisDataUpdateCoordinator
PLATFORMS = [Platform.CLIMATE]
async def async_setup_entry(hass: HomeAssistant, entry: FumisConfigEntry) -> bool:
    """Set up Fumis from a config entry.

    Creates the data update coordinator, performs the initial data
    refresh, stores the coordinator on the entry, and forwards setup to
    the supported platforms.
    """
    update_coordinator = FumisDataUpdateCoordinator(hass, entry)
    # The first refresh must complete successfully before the entry is
    # considered set up and platforms are loaded.
    await update_coordinator.async_config_entry_first_refresh()
    entry.runtime_data = update_coordinator
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
async def async_unload_entry(hass: HomeAssistant, entry: FumisConfigEntry) -> bool:
    """Unload a Fumis config entry.

    Returns True when all platforms were unloaded successfully.
    """
    unload_ok: bool = await hass.config_entries.async_unload_platforms(
        entry, PLATFORMS
    )
    return unload_ok
+128
View File
@@ -0,0 +1,128 @@
"""Support for Fumis climate entities."""
from __future__ import annotations
from typing import Any
from fumis import StoveStatus
from homeassistant.components.climate import (
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FumisConfigEntry, FumisDataUpdateCoordinator
from .entity import FumisEntity
from .helpers import fumis_exception_handler
# Limit the platform to one concurrent request to the Fumis service.
PARALLEL_UPDATES = 1

# Map the stove's reported status to Home Assistant's HVACAction.
# Grouping (semantics come from the fumis library — confirm there):
# *_OFF states -> OFF, pre-heat/ignition phases -> PREHEATING, active
# combustion modes -> HEATING, COOLING -> IDLE, and UNKNOWN -> None
# (no action can be reported).
STOVE_STATUS_TO_HVAC_ACTION: dict[StoveStatus, HVACAction | None] = {
    StoveStatus.OFF: HVACAction.OFF,
    StoveStatus.COLD_START_OFF: HVACAction.OFF,
    StoveStatus.WOOD_BURNING_OFF: HVACAction.OFF,
    StoveStatus.PRE_HEATING: HVACAction.PREHEATING,
    StoveStatus.IGNITION: HVACAction.PREHEATING,
    StoveStatus.PRE_COMBUSTION: HVACAction.PREHEATING,
    StoveStatus.COLD_START: HVACAction.PREHEATING,
    StoveStatus.COMBUSTION: HVACAction.HEATING,
    StoveStatus.ECO: HVACAction.HEATING,
    StoveStatus.HYBRID_INIT: HVACAction.HEATING,
    StoveStatus.HYBRID_START: HVACAction.HEATING,
    StoveStatus.WOOD_START: HVACAction.HEATING,
    StoveStatus.WOOD_COMBUSTION: HVACAction.HEATING,
    StoveStatus.COOLING: HVACAction.IDLE,
    StoveStatus.UNKNOWN: None,
}
async def async_setup_entry(
    hass: HomeAssistant,
    entry: FumisConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Fumis climate entity based on a config entry."""
    climate = FumisClimateEntity(entry.runtime_data)
    async_add_entities([climate])
class FumisClimateEntity(FumisEntity, ClimateEntity):
    """Climate entity representing a Fumis-controlled pellet stove.

    Exposes on/off control (HEAT/OFF modes), the current stove activity,
    and the main temperature setpoint (10-35 degrees in 0.5 steps,
    Celsius).
    """

    _attr_hvac_modes = [HVACMode.OFF, HVACMode.HEAT]
    _attr_max_temp = 35.0
    _attr_min_temp = 10.0
    # The stove is the device itself, so the entity takes the device name.
    _attr_name = None
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.TURN_OFF
        | ClimateEntityFeature.TURN_ON
    )
    _attr_target_temperature_step = 0.5
    _attr_temperature_unit = UnitOfTemperature.CELSIUS

    def __init__(self, coordinator: FumisDataUpdateCoordinator) -> None:
        """Initialize the Fumis climate entity."""
        super().__init__(coordinator)
        self._attr_unique_id = coordinator.config_entry.unique_id

    @property
    def hvac_mode(self) -> HVACMode:
        """Return HEAT when the stove controller reports on, else OFF."""
        if self.coordinator.data.controller.on:
            return HVACMode.HEAT
        return HVACMode.OFF

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the current HVAC action derived from the stove status.

        Uses .get() so a status value missing from the mapping (e.g. one
        added in a newer fumis library release) reports no action instead
        of raising KeyError during a state update.
        """
        return STOVE_STATUS_TO_HVAC_ACTION.get(
            self.coordinator.data.controller.stove_status
        )

    @property
    def current_temperature(self) -> float | None:
        """Return the measured temperature, or None when unavailable."""
        if (temp := self.coordinator.data.controller.main_temperature) is None:
            return None
        return temp.actual

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature setpoint, or None when unavailable."""
        if (temp := self.coordinator.data.controller.main_temperature) is None:
            return None
        return temp.setpoint

    @fumis_exception_handler
    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode by turning the stove on or off."""
        if hvac_mode == HVACMode.HEAT:
            await self.coordinator.client.turn_on()
        else:
            await self.coordinator.client.turn_off()
        await self.coordinator.async_request_refresh()

    @fumis_exception_handler
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the target temperature.

        A call without a temperature value is ignored silently, matching
        the climate platform convention.
        """
        if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
            return
        await self.coordinator.client.set_target_temperature(temperature)
        await self.coordinator.async_request_refresh()

    @fumis_exception_handler
    async def async_turn_on(self) -> None:
        """Turn on the stove."""
        await self.coordinator.client.turn_on()
        await self.coordinator.async_request_refresh()

    @fumis_exception_handler
    async def async_turn_off(self) -> None:
        """Turn off the stove."""
        await self.coordinator.client.turn_off()
        await self.coordinator.async_request_refresh()
@@ -0,0 +1,82 @@
"""Config flow to configure the Fumis integration."""
from __future__ import annotations
from typing import Any
from fumis import (
Fumis,
FumisAuthenticationError,
FumisConnectionError,
FumisStoveOfflineError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_MAC, CONF_PIN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DOMAIN, LOGGER
class FumisFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a Fumis config flow."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user.

        Validates the MAC/PIN pair against the Fumis online service and
        creates a config entry keyed by the formatted MAC address.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            # Normalize the MAC: strip colon/dash separators and uppercase,
            # since the service identifies the stove by the bare MAC string.
            mac = user_input[CONF_MAC].replace(":", "").replace("-", "").upper()
            fumis = Fumis(
                mac=mac,
                password=user_input[CONF_PIN],
                session=async_get_clientsession(self.hass),
            )
            try:
                info = await fumis.update_info()
            except FumisAuthenticationError:
                # Wrong PIN for this MAC: attach the error to the PIN field.
                errors[CONF_PIN] = "invalid_auth"
            except FumisStoveOfflineError:
                errors["base"] = "device_offline"
            except FumisConnectionError:
                errors["base"] = "cannot_connect"
            except Exception:  # noqa: BLE001
                LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                # raise_on_progress=False: this user-initiated flow proceeds
                # even if another flow for the same unique_id is in progress.
                await self.async_set_unique_id(format_mac(mac), raise_on_progress=False)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title=info.controller.model_name or "Fumis",
                    data={
                        CONF_MAC: mac,
                        CONF_PIN: user_input[CONF_PIN],
                    },
                )
        # Initial form, or re-shown with errors and previous input suggested.
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Required(CONF_MAC): TextSelector(
                            TextSelectorConfig(autocomplete="off")
                        ),
                        vol.Required(CONF_PIN): TextSelector(
                            TextSelectorConfig(type=TextSelectorType.PASSWORD)
                        ),
                    }
                ),
                user_input,
            ),
            errors=errors,
        )
+11
View File
@@ -0,0 +1,11 @@
"""Constants for the Fumis integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Final
DOMAIN: Final = "fumis"
LOGGER = logging.getLogger(__package__)
SCAN_INTERVAL = timedelta(seconds=30)
@@ -0,0 +1,70 @@
"""DataUpdateCoordinator for Fumis."""
from __future__ import annotations
from fumis import (
Fumis,
FumisAuthenticationError,
FumisConnectionError,
FumisError,
FumisInfo,
FumisStoveOfflineError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_MAC, CONF_PIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, LOGGER, SCAN_INTERVAL
type FumisConfigEntry = ConfigEntry[FumisDataUpdateCoordinator]
class FumisDataUpdateCoordinator(DataUpdateCoordinator[FumisInfo]):
    """Class to manage fetching Fumis data.

    Owns the Fumis API client and polls the online service every
    SCAN_INTERVAL, translating library exceptions into UpdateFailed with
    the matching translation keys from strings.json.
    """

    config_entry: FumisConfigEntry

    def __init__(self, hass: HomeAssistant, entry: FumisConfigEntry) -> None:
        """Initialize the coordinator and its API client."""
        self.client = Fumis(
            mac=entry.data[CONF_MAC],
            password=entry.data[CONF_PIN],
            session=async_get_clientsession(hass),
        )
        super().__init__(
            hass,
            LOGGER,
            config_entry=entry,
            name=f"{DOMAIN}_{entry.unique_id}",
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> FumisInfo:
        """Fetch data from the Fumis API.

        Raises UpdateFailed for every library error; the most specific
        exceptions are handled first, with FumisError as the catch-all.
        """
        try:
            return await self.client.update_info()
        except FumisAuthenticationError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="authentication_error",
            ) from err
        except FumisStoveOfflineError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="stove_offline",
            ) from err
        except FumisConnectionError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="communication_error",
                translation_placeholders={"error": str(err)},
            ) from err
        except FumisError as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="unknown_error",
                translation_placeholders={"error": str(err)},
            ) from err
+35
View File
@@ -0,0 +1,35 @@
"""Base entity for the Fumis integration."""
from __future__ import annotations
from homeassistant.const import CONF_MAC
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
format_mac,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import FumisDataUpdateCoordinator
class FumisEntity(CoordinatorEntity[FumisDataUpdateCoordinator]):
    """Base class for Fumis entities, wiring up shared device info."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: FumisDataUpdateCoordinator) -> None:
        """Initialize the entity and register its device information."""
        super().__init__(coordinator=coordinator)
        data = coordinator.data
        controller = data.controller
        device_mac = format_mac(coordinator.config_entry.data[CONF_MAC])
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, device_mac)},
            connections={(CONNECTION_NETWORK_MAC, device_mac)},
            manufacturer=controller.manufacturer or "Fumis",
            model=controller.model_name,
            name=controller.model_name or "Pellet stove",
            sw_version=str(controller.version),
            hw_version=str(data.unit.version),
        )
+63
View File
@@ -0,0 +1,63 @@
"""Helpers for Fumis."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from typing import Any, Concatenate
from fumis import (
FumisAuthenticationError,
FumisConnectionError,
FumisError,
FumisStoveOfflineError,
)
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
from .entity import FumisEntity
def fumis_exception_handler[_FumisEntityT: FumisEntity, **_P](
func: Callable[Concatenate[_FumisEntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_FumisEntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate Fumis calls to handle exceptions.
A decorator that wraps the passed in function, catches Fumis errors.
"""
async def handler(self: _FumisEntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
try:
await func(self, *args, **kwargs)
self.coordinator.async_update_listeners()
except FumisAuthenticationError as error:
self.hass.config_entries.async_schedule_reload(
self.coordinator.config_entry.entry_id
)
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="authentication_error",
) from error
except FumisStoveOfflineError as error:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="stove_offline",
) from error
except FumisConnectionError as error:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="communication_error",
translation_placeholders={"error": str(error)},
) from error
except FumisError as error:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unknown_error",
translation_placeholders={"error": str(error)},
) from error
return handler
@@ -0,0 +1,12 @@
{
"domain": "fumis",
"name": "Fumis",
"codeowners": ["@frenck"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/fumis",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["fumis"],
"quality_scale": "bronze",
"requirements": ["fumis==0.2.1"]
}
@@ -0,0 +1,78 @@
rules:
# Bronze
action-setup:
status: exempt
comment: This integration does not register custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not have any custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: This integration does not have an options flow.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery:
status: todo
comment: DHCP discovery can be added.
discovery-update-info:
status: todo
comment: DHCP discovery based update can be added.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration connects to a single device.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: This integration does not raise any repairable issues.
stale-devices:
status: exempt
comment: This integration connects to a single device.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done
@@ -0,0 +1,40 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"device_offline": "Your stove's Fumis WiRCU Wi-Fi module is not connected to the internet. Make sure the module has power and is connected to your Wi-Fi network.",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"mac": "MAC address",
"pin": "PIN code"
},
"data_description": {
"mac": "The MAC address is a unique code of letters and numbers that identifies your stove. You can find it on the label of the Fumis WiRCU Wi-Fi module connected to your stove.",
"pin": "You can find the PIN code on the label of the Fumis WiRCU Wi-Fi module connected to your stove."
},
"description": "Integrate your Fumis-based pellet stove with Home Assistant to monitor and control it. You can see your stove's temperature, heating status, and adjust the target temperature right from your dashboard or use it in your automations. This way, you can make sure your home is always nice, warm, and comfortable."
}
}
},
"exceptions": {
"authentication_error": {
"message": "Authentication with the Fumis online service failed. Check your MAC address and PIN code."
},
"communication_error": {
"message": "An error occurred while communicating with the Fumis online service: {error}"
},
"stove_offline": {
"message": "Your stove's Fumis WiRCU Wi-Fi module is not connected to the internet."
},
"unknown_error": {
"message": "An unexpected error occurred while communicating with the Fumis online service: {error}"
}
}
}
@@ -133,7 +133,7 @@ DESCRIPTIONS = (
key=FlowStatistics.overall.unique_id,
translation_key="flow_statistics_overall",
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolume.LITERS,
char=FlowStatistics.overall,
@@ -141,6 +141,7 @@ DESCRIPTIONS = (
GardenaBluetoothSensorEntityDescription(
key=FlowStatistics.current.unique_id,
translation_key="flow_statistics_current",
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
@@ -150,7 +151,7 @@ DESCRIPTIONS = (
key=FlowStatistics.resettable.unique_id,
translation_key="flow_statistics_resettable",
state_class=SensorStateClass.TOTAL_INCREASING,
device_class=SensorDeviceClass.VOLUME,
device_class=SensorDeviceClass.WATER,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfVolume.LITERS,
char=FlowStatistics.resettable,
@@ -166,6 +167,7 @@ DESCRIPTIONS = (
GardenaBluetoothSensorEntityDescription(
key=Spray.current_distance.unique_id,
translation_key="spray_current_distance",
state_class=SensorStateClass.MEASUREMENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=PERCENTAGE,
char=Spray.current_distance,
@@ -75,7 +75,7 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN):
def is_matching(self, other_flow: Self) -> bool:
"""Return True if other_flow is matching this flow."""
return other_flow._ip_address == self._ip_address # noqa: SLF001
return other_flow._ip_address == self._ip_address
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -256,11 +256,13 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.error("Invalid response format during login: %s", ex)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
if (
not login_response["success"]
and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
):
return self._async_show_password_form({"base": ERROR_INVALID_AUTH})
if not login_response.get("success"):
if login_response.get("msg") == LOGIN_INVALID_AUTH_CODE:
return self._async_show_password_form({"base": ERROR_INVALID_AUTH})
_LOGGER.debug(
"Growatt login failed: %s", login_response.get("msg", "Unknown error")
)
return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
self.user_id = login_response["user"]["id"]
self.data = user_input
+3 -32
View File
@@ -25,17 +25,15 @@ from aiohasupervisor.models import (
SupervisorOptions,
YellowOptions,
)
import voluptuous as vol
from homeassistant.auth.const import GROUP_ID_ADMIN
from homeassistant.auth.models import RefreshToken
from homeassistant.components import frontend, panel_custom
from homeassistant.components import frontend
from homeassistant.components.homeassistant import async_set_stop_handler
from homeassistant.components.http import (
CONF_SERVER_HOST,
CONF_SERVER_PORT,
CONF_SSL_CERTIFICATE,
StaticPathConfig,
)
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
from homeassistant.const import (
@@ -156,12 +154,7 @@ _LOGGER = logging.getLogger(__name__)
# wait for the import of the platforms
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
CONF_FRONTEND_REPO = "development_repo"
CONFIG_SCHEMA = vol.Schema(
{vol.Optional(DOMAIN): vol.Schema({vol.Optional(CONF_FRONTEND_REPO): cv.isdir})},
extra=vol.ALLOW_EXTRA,
)
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
DEPRECATION_URL = (
@@ -198,7 +191,7 @@ def hostname_from_addon_slug(addon_slug: str) -> str:
return addon_slug.replace("_", "-")
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Hass.io component."""
# Check local setup
for env in ("SUPERVISOR", "SUPERVISOR_TOKEN"):
@@ -250,30 +243,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
refresh_token = await hass.auth.async_create_refresh_token(user)
config_store.update(hassio_user=user.id)
# This overrides the normal API call that would be forwarded
development_repo = config.get(DOMAIN, {}).get(CONF_FRONTEND_REPO)
if development_repo is not None:
await hass.http.async_register_static_paths(
[
StaticPathConfig(
"/api/hassio/app",
os.path.join(development_repo, "hassio/build"),
False,
)
]
)
hass.http.register_view(HassIOView(host, websession))
await panel_custom.async_register_panel(
hass,
frontend_url_path="hassio",
webcomponent_name="hassio-main",
js_url="/api/hassio/app/entrypoint.js",
embed_iframe=True,
require_admin=True,
)
async def update_hass_api(http_config: dict[str, Any], refresh_token: RefreshToken):
"""Update Home Assistant API data on Hass.io."""
options = HomeAssistantOptions(
+4 -15
View File
@@ -14,7 +14,6 @@ from aiohttp import web
from aiohttp.client import ClientTimeout
from aiohttp.hdrs import (
AUTHORIZATION,
CACHE_CONTROL,
CONTENT_ENCODING,
CONTENT_LENGTH,
CONTENT_TYPE,
@@ -81,20 +80,13 @@ PATHS_ADMIN = re.compile(
r")$"
)
# Unauthenticated requests come in for Supervisor panel + add-on images
# Unauthenticated requests come in for add-on images
PATHS_NO_AUTH = re.compile(
r"^(?:"
r"|app/.*"
r"|(store/)?addons/[^/]+/(logo|icon)"
r")$"
)
NO_STORE = re.compile(
r"^(?:"
r"|app/entrypoint.js"
r")$"
)
# Follow logs should not be compressed, to be able to get streamed by frontend
NO_COMPRESS = re.compile(
r"^(?:"
@@ -218,7 +210,7 @@ class HassIOView(HomeAssistantView):
# Stream response
response = web.StreamResponse(
status=client.status, headers=_response_header(client, path)
status=client.status, headers=_response_header(client)
)
response.content_type = client.content_type
@@ -243,16 +235,13 @@ class HassIOView(HomeAssistantView):
post = _handle
def _response_header(response: aiohttp.ClientResponse, path: str) -> dict[str, str]:
def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
"""Create response header."""
headers = {
return {
name: value
for name, value in response.headers.items()
if name not in RESPONSE_HEADERS_FILTER
}
if NO_STORE.match(path):
headers[CACHE_CONTROL] = "no-store, max-age=0"
return headers
def _get_timeout(path: str) -> ClientTimeout:
+1
View File
@@ -81,6 +81,7 @@ MODEL_INPUTS = {
"XLR 2",
"Analog 1",
"Analog 2",
"Analog 3",
"BNC",
"Coaxial",
"Optical 1",
@@ -452,6 +452,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
"arch": arch,
},
)
if not info["docker"] and not info["virtualenv"]:
ir.async_create_issue(
hass,
DOMAIN,
"unsupported_local_deps",
learn_more_url=DEPRECATION_URL,
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="unsupported_local_deps",
)
# Delay deprecation check to make sure installation method is determined correctly
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, _async_check_deprecation)
@@ -106,12 +106,12 @@
"title": "[%key:component::homeassistant::issues::deprecated_architecture::title%]"
},
"deprecated_method": {
"description": "This system is using the {installation_type} installation type, which has been deprecated and will become unsupported following the release of Home Assistant 2025.12. While you can continue using your current setup after that point, we strongly recommend migrating to a supported installation method.",
"title": "Deprecation notice: Installation method"
"description": "This system is using the {installation_type} installation type, which has been unsupported since Home Assistant 2025.12. To continue receiving updates and support, migrate to a supported installation method.",
"title": "Unsupported installation method"
},
"deprecated_method_architecture": {
"description": "This system is using the {installation_type} installation type, and 32-bit hardware (`{arch}`), both of which have been deprecated and will no longer be supported after the release of Home Assistant 2025.12.",
"title": "Deprecation notice"
"description": "This system is using the {installation_type} installation type, and 32-bit hardware (`{arch}`), both of which have been unsupported since Home Assistant 2025.12. To continue receiving updates and support, migrate to supported hardware and use a supported installation method.",
"title": "Unsupported installation method and architecture"
},
"deprecated_os_aarch64": {
"description": "This system is running on a 32-bit operating system (`armv7`), which has been deprecated and will no longer receive updates after the release of Home Assistant 2025.12. To continue using Home Assistant on this hardware, you will need to install a 64-bit operating system. Please refer to our [installation guide]({installation_guide}).",
@@ -203,6 +203,10 @@
}
},
"title": "Storage corruption detected for {storage_key}"
},
"unsupported_local_deps": {
"description": "This system is running Home Assistant outside a virtual environment or a Docker container. This is not supported and will not work after the release of Home Assistant 2026.11.",
"title": "Deprecation notice: Installation method"
}
},
"services": {
@@ -7,8 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"serialx==1.2.2",
"universal-silabs-flasher==1.0.3",
"universal-silabs-flasher==1.1.0",
"ha-silabs-firmware-client==0.3.0"
]
}
@@ -4,6 +4,9 @@
"hydrological_alert": {
"default": "mdi:alert-octagon-outline"
},
"ice_phenomena": {
"default": "mdi:snowflake"
},
"water_flow": {
"default": "mdi:waves-arrow-right"
},
+14 -1
View File
@@ -16,7 +16,12 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfLength, UnitOfTemperature, UnitOfVolumeFlowRate
from homeassistant.const import (
PERCENTAGE,
UnitOfLength,
UnitOfTemperature,
UnitOfVolumeFlowRate,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -60,6 +65,14 @@ SENSOR_TYPES: tuple[ImgwPibSensorEntityDescription, ...] = (
value=lambda data: data.hydrological_alert.value,
attrs=gen_alert_attributes,
),
ImgwPibSensorEntityDescription(
key="ice_phenomena",
translation_key="ice_phenomena",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value=lambda data: data.ice_phenomena.value,
suggested_display_precision=0,
),
ImgwPibSensorEntityDescription(
key="water_flow",
translation_key="water_flow",
@@ -59,6 +59,9 @@
}
}
},
"ice_phenomena": {
"name": "Ice phenomena"
},
"water_flow": {
"name": "Water flow"
},
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/jewish_calendar",
"iot_class": "calculated",
"loggers": ["hdate"],
"requirements": ["hdate[astral]==1.1.2"],
"requirements": ["hdate[astral]==1.2.1"],
"single_config_entry": true
}
+1 -1
View File
@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.15.0",
"xknxproject==3.8.2",
"knx-frontend==2026.3.28.223133"
"knx-frontend==2026.4.19.175239"
],
"single_config_entry": true
}
+10 -5
View File
@@ -154,6 +154,10 @@
}
},
"config_panel": {
"common": {
"group_address": "Group address",
"group_addresses": "Group addresses"
},
"dashboard": {
"connection_flow": {
"description": "Reconfigure KNX connection or import a new KNX keyring file",
@@ -960,6 +964,7 @@
"description": "Minimum time between consecutive sends. This can be used to prevent high traffic on the KNX bus when values change very frequently. Only the most recent value during the cooldown period is sent.",
"label": "Cooldown"
},
"copy_info": "Copying options of {entity_name} ({entity_id}).",
"default": {
"description": "The value to send if the entity state is `unavailable` or `unknown`, or if the attribute is not set. If `default` is omitted, nothing is sent in these cases, but the last known value remains available for read requests.",
"label": "Default value"
@@ -969,7 +974,7 @@
"label": "Entity"
},
"ga": {
"label": "Group address"
"label": "[%key:component::knx::config_panel::common::group_address%]"
},
"periodic_send": {
"description": "Time interval to automatically resend the current value to the KNX bus, even if it hasnt changed.",
@@ -1213,7 +1218,7 @@
"fields": {
"address": {
"description": "Group address(es) that shall be added or removed. Lists are allowed.",
"name": "[%key:component::knx::services::send::fields::address::name%]"
"name": "[%key:component::knx::config_panel::common::group_address%]"
},
"remove": {
"description": "Whether the group address(es) will be removed.",
@@ -1231,7 +1236,7 @@
"fields": {
"address": {
"description": "Group address state or attribute updates will be sent to. GroupValueRead requests will be answered. Per address only one exposure can be registered.",
"name": "[%key:component::knx::services::send::fields::address::name%]"
"name": "[%key:component::knx::config_panel::common::group_address%]"
},
"attribute": {
"description": "Attribute of the entity that shall be sent to the KNX bus. If not set, the state will be sent. Eg. for a light the state is either “on” or “off” - with attribute you can expose its “brightness”.",
@@ -1261,7 +1266,7 @@
"fields": {
"address": {
"description": "Group address(es) to send read request to. Lists will read multiple group addresses.",
"name": "[%key:component::knx::services::send::fields::address::name%]"
"name": "[%key:component::knx::config_panel::common::group_address%]"
}
},
"name": "Read from KNX bus"
@@ -1275,7 +1280,7 @@
"fields": {
"address": {
"description": "Group address(es) to write to. Lists will send to multiple group addresses successively.",
"name": "Group address"
"name": "[%key:component::knx::config_panel::common::group_address%]"
},
"payload": {
"description": "Payload to send to the bus. Integers are treated as DPT 1/2/3 payloads. For DPTs > 6 bits send a list. Each value represents 1 octet (0-255). Pad with 0 to DPT byte length.",
@@ -70,7 +70,7 @@ async def build_item_response(media_library, payload, get_thumbnail_url=None):
media_content_id=search_id,
media_content_type=search_type,
title=title,
can_play=search_type in PLAYABLE_MEDIA_TYPES and search_id,
can_play=bool(search_type in PLAYABLE_MEDIA_TYPES and search_id),
can_expand=True,
children=children,
thumbnail=thumbnail,
@@ -57,11 +57,11 @@ class LgIrTvMediaPlayer(LgIrEntity, MediaPlayerEntity):
async def async_turn_on(self) -> None:
"""Turn on the TV."""
await self._send_command(LGTVCode.POWER)
await self._send_command(LGTVCode.POWER_ON)
async def async_turn_off(self) -> None:
"""Turn off the TV."""
await self._send_command(LGTVCode.POWER)
await self._send_command(LGTVCode.POWER_OFF)
async def async_volume_up(self) -> None:
"""Send volume up command."""
+7 -7
View File
@@ -2,7 +2,7 @@
from __future__ import annotations
from collections.abc import Callable, Mapping
from collections.abc import Callable, Collection, Mapping
from typing import Any
from homeassistant.components.sensor import ATTR_STATE_CLASS, NON_NUMERIC_DEVICE_CLASSES
@@ -75,12 +75,12 @@ def _async_config_entries_for_ids(
def async_determine_event_types(
hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
) -> tuple[EventType[Any] | str, ...]:
) -> set[EventType[Any] | str]:
"""Reduce the event types based on the entity ids and device ids."""
logbook_config: LogbookConfig = hass.data[DOMAIN]
external_events = logbook_config.external_events
if not entity_ids and not device_ids:
return (*BUILT_IN_EVENTS, *external_events)
return {*BUILT_IN_EVENTS, *external_events}
interested_domains: set[str] = set()
for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
@@ -93,16 +93,16 @@ def async_determine_event_types(
# to add them since we have historically included
# them when matching only on entities
#
intrested_event_types: set[EventType[Any] | str] = {
interested_event_types: set[EventType[Any] | str] = {
external_event
for external_event, domain_call in external_events.items()
if domain_call[0] in interested_domains
} | AUTOMATION_EVENTS
if entity_ids:
# We also allow entity_ids to be recorded via manual logbook entries.
intrested_event_types.add(EVENT_LOGBOOK_ENTRY)
interested_event_types.add(EVENT_LOGBOOK_ENTRY)
return tuple(intrested_event_types)
return interested_event_types
@callback
@@ -187,7 +187,7 @@ def async_subscribe_events(
hass: HomeAssistant,
subscriptions: list[CALLBACK_TYPE],
target: Callable[[Event[Any]], None],
event_types: tuple[EventType[Any] | str, ...],
event_types: Collection[EventType[Any] | str],
entities_filter: Callable[[str], bool] | None,
entity_ids: list[str] | None,
device_ids: list[str] | None,
@@ -2,7 +2,7 @@
from __future__ import annotations
from collections.abc import Callable, Generator, Sequence
from collections.abc import Callable, Collection, Generator, Sequence
from dataclasses import dataclass, field
from datetime import datetime as dt
import logging
@@ -126,7 +126,7 @@ class EventProcessor:
def __init__(
self,
hass: HomeAssistant,
event_types: tuple[EventType[Any] | str, ...],
event_types: Collection[EventType[Any] | str],
entity_ids: list[str] | None = None,
device_ids: list[str] | None = None,
context_id: str | None = None,
@@ -20,7 +20,6 @@ from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util
from homeassistant.util.async_ import create_eager_task
from homeassistant.util.event_type import EventType
from .const import DOMAIN
from .helpers import (
@@ -366,16 +365,11 @@ async def ws_event_stream(
# cache parent user_ids as they fire. Historical queries don't — the
# context_only join fetches them by context_id regardless of type.
# Unfiltered streams already include it via BUILT_IN_EVENTS.
live_event_types: tuple[EventType[Any] | str, ...] = (
event_types
if EVENT_CALL_SERVICE in event_types
else (*event_types, EVENT_CALL_SERVICE)
)
async_subscribe_events(
hass,
subscriptions,
_queue_or_cancel,
live_event_types,
{*event_types, EVENT_CALL_SERVICE},
entities_filter,
entity_ids,
device_ids,
+5
View File
@@ -46,6 +46,11 @@ class LyricLocalOAuth2Implementation(
):
"""Lyric Local OAuth2 implementation."""
@property
def extra_authorize_data(self) -> dict:
"""Prompt the user to choose between Resideo and First Alert apps."""
return {"appSelect": "1"}
async def _token_request(self, data: dict) -> dict:
"""Make a token request."""
session = async_get_clientsession(self.hass)
@@ -8,6 +8,6 @@
"documentation": "https://www.home-assistant.io/integrations/matter",
"integration_type": "hub",
"iot_class": "local_push",
"requirements": ["matter-python-client==0.4.1"],
"requirements": ["matter-python-client==0.6.0"],
"zeroconf": ["_matter._tcp.local.", "_matterc._udp.local."]
}
@@ -337,8 +337,8 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
self._attr_brightness = last_attributes.get(
ATTR_BRIGHTNESS, self.brightness
)
self._attr_color_mode = last_attributes.get(
ATTR_COLOR_MODE, self.color_mode
self._attr_color_mode = (
last_attributes.get(ATTR_COLOR_MODE) or self.color_mode
)
self._attr_color_temp_kelvin = last_attributes.get(
ATTR_COLOR_TEMP_KELVIN, self.color_temp_kelvin
@@ -53,6 +53,7 @@ PLATFORMS = [
Platform.BUTTON,
Platform.MEDIA_PLAYER,
Platform.NUMBER,
Platform.SELECT,
Platform.SWITCH,
Platform.TEXT,
]
@@ -0,0 +1,123 @@
"""Music Assistant select platform."""
from __future__ import annotations
from typing import Final
from music_assistant_client.client import MusicAssistantClient
from music_assistant_models.player import PlayerOption, PlayerOptionType
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import MusicAssistantConfigEntry
from .entity import MusicAssistantPlayerOptionEntity
from .helpers import catch_musicassistant_error
PLAYER_OPTIONS_SELECT: Final[dict[str, bool]] = {
# translation_key: enabled_by_default
"dimmer": False,
"equalizer_mode": False,
"link_audio_delay": True,
"link_audio_quality": False,
"link_control": False,
"sleep": False,
"surround_decoder_type": False,
"tone_control_mode": True,
}
async def async_setup_entry(
hass: HomeAssistant,
entry: MusicAssistantConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Music Assistant Select Entities (Player Options) from Config Entry."""
mass = entry.runtime_data.mass
def add_player(player_id: str) -> None:
"""Handle add player."""
player = mass.players.get(player_id)
if player is None:
return
entities: list[MusicAssistantPlayerConfigSelect] = []
for player_option in player.options:
if (
not player_option.read_only
and player_option.type
!= PlayerOptionType.BOOLEAN # these always go to switch
and player_option.options
):
# We ignore entities with unknown translation key for the base name.
# However, we accept a non-available translation_key in strings.json for the entity's state,
# as these are oftentimes dynamically created, dependent on a specific player and might not be known to the provider
# developer. In that case, the frontend falls back to showing the state's bare translation key.
if player_option.translation_key not in PLAYER_OPTIONS_SELECT:
continue
entities.append(
MusicAssistantPlayerConfigSelect(
mass,
player_id,
player_option=player_option,
entity_description=SelectEntityDescription(
key=player_option.key,
translation_key=player_option.translation_key,
entity_registry_enabled_default=PLAYER_OPTIONS_SELECT[
player_option.translation_key
],
),
)
)
async_add_entities(entities)
# register callback to add players when they are discovered
entry.runtime_data.platform_handlers.setdefault(Platform.SELECT, add_player)
class MusicAssistantPlayerConfigSelect(MusicAssistantPlayerOptionEntity, SelectEntity):
"""Representation of a select entity to control player provider dependent settings."""
def __init__(
self,
mass: MusicAssistantClient,
player_id: str,
player_option: PlayerOption,
entity_description: SelectEntityDescription,
) -> None:
"""Initialize MusicAssistantPlayerConfigSelect."""
# this was verified already in the entry callback
assert player_option.options is not None
# we have to define the dicts before initializing the parent, as this
# then calls self.on_player_option_update
self._option_translation_key_to_key_mapping = {
option.translation_key: option.key for option in player_option.options
}
self._option_key_to_translation_key_mapping = {
option.key: option.translation_key for option in player_option.options
}
super().__init__(mass, player_id, player_option)
self.entity_description = entity_description
self._attr_options = list(self._option_translation_key_to_key_mapping.keys())
@catch_musicassistant_error
async def async_select_option(self, option: str) -> None:
"""Select an option."""
await self.mass.players.set_option(
self.player_id,
self.mass_option_key,
self._option_translation_key_to_key_mapping[option],
)
def on_player_option_update(self, player_option: PlayerOption) -> None:
"""Update on player option update."""
self._attr_current_option = (
self._option_key_to_translation_key_mapping.get(player_option.value)
if isinstance(player_option.value, str)
else None
)
@@ -147,6 +147,80 @@
"name": "Treble"
}
},
"select": {
"dimmer": {
"name": "Dimmer",
"state": {
"auto": "[%key:common::state::auto%]"
}
},
"equalizer_mode": {
"name": "Equalizer mode",
"state": {
"auto": "[%key:common::state::auto%]",
"bypass": "Bypass",
"manual": "[%key:common::state::manual%]"
}
},
"link_audio_delay": {
"name": "Link audio delay",
"state": {
"audio_sync": "Audio synchronization",
"audio_sync_off": "Audio synchronization off",
"audio_sync_on": "Audio synchronization on",
"balanced": "Balanced",
"lip_sync": "Lip synchronization"
}
},
"link_audio_quality": {
"name": "Link audio quality",
"state": {
"compressed": "Compressed",
"uncompressed": "Uncompressed"
}
},
"link_control": {
"name": "Link control",
"state": {
"speed": "Speed",
"stability": "Stability",
"standard": "Standard"
}
},
"sleep": {
"name": "Sleep timer",
"state": {
"0": "[%key:common::state::off%]",
"30": "30 minutes",
"60": "60 minutes",
"90": "90 minutes",
"120": "120 minutes"
}
},
"surround_decoder_type": {
"name": "Surround decoder type",
"state": {
"auto": "[%key:common::state::auto%]",
"dolby_pl": "Dolby ProLogic",
"dolby_pl2x_game": "Dolby ProLogic 2x Game",
"dolby_pl2x_movie": "Dolby ProLogic 2x Movie",
"dolby_pl2x_music": "Dolby ProLogic 2x Music",
"dolby_surround": "Dolby Surround",
"dts_neo6_cinema": "DTS Neo:6 Cinema",
"dts_neo6_music": "DTS Neo:6 Music",
"dts_neural_x": "DTS Neural:X",
"toggle": "[%key:common::action::toggle%]"
}
},
"tone_control_mode": {
"name": "Tone control mode",
"state": {
"auto": "[%key:common::state::auto%]",
"bypass": "Bypass",
"manual": "[%key:common::state::manual%]"
}
}
},
"switch": {
"adaptive_drc": {
"name": "Adaptive DRC"
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["nikohomecontrol"],
"requirements": ["nhc==0.7.0"]
"requirements": ["nhc==0.8.0"]
}
+73 -12
View File
@@ -5,11 +5,25 @@ from __future__ import annotations
from pynobo import nobo
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_IP_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.const import (
ATTR_NAME,
CONF_IP_ADDRESS,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.util import dt as dt_util
from .const import CONF_AUTO_DISCOVERED, CONF_SERIAL
from .const import (
ATTR_HARDWARE_VERSION,
ATTR_SOFTWARE_VERSION,
CONF_AUTO_DISCOVERED,
CONF_SERIAL,
DOMAIN,
NOBO_MANUFACTURER,
)
PLATFORMS = [Platform.CLIMATE, Platform.SELECT, Platform.SENSOR]
@@ -20,16 +34,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: NoboHubConfigEntry) -> b
"""Set up Nobø Ecohub from a config entry."""
serial = entry.data[CONF_SERIAL]
discover = entry.data[CONF_AUTO_DISCOVERED]
ip_address = None if discover else entry.data[CONF_IP_ADDRESS]
hub = nobo(
serial=serial,
ip=ip_address,
discover=discover,
synchronous=False,
timezone=dt_util.get_default_time_zone(),
)
await hub.connect()
stored_ip = entry.data[CONF_IP_ADDRESS]
auto_discovered = entry.data[CONF_AUTO_DISCOVERED]
async def _connect(ip: str) -> nobo:
hub = nobo(
serial=serial,
ip=ip,
discover=False,
synchronous=False,
timezone=dt_util.get_default_time_zone(),
)
await hub.connect()
return hub
try:
hub = await _connect(stored_ip)
except OSError as err:
if not auto_discovered:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect_manual",
translation_placeholders={"serial": serial, "ip": stored_ip},
) from err
# Stored IP may be stale for an auto-discovered entry - try UDP
# rediscovery to pick up a new DHCP lease.
discovered = await nobo.async_discover_hubs(serial=serial)
if not discovered:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="hub_not_found",
translation_placeholders={"serial": serial},
) from err
new_ip, _ = next(iter(discovered))
try:
hub = await _connect(new_ip)
except OSError as rediscover_err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect_rediscovered",
translation_placeholders={"ip": new_ip},
) from rediscover_err
if new_ip != stored_ip:
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_IP_ADDRESS: new_ip}
)
async def _async_close(event):
"""Close the Nobø Ecohub socket connection when HA stops."""
@@ -40,6 +89,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: NoboHubConfigEntry) -> b
)
entry.runtime_data = hub
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, hub.hub_serial)},
serial_number=hub.hub_serial,
name=hub.hub_info[ATTR_NAME],
manufacturer=NOBO_MANUFACTURER,
model="Nobø Ecohub",
sw_version=hub.hub_info[ATTR_SOFTWARE_VERSION],
hw_version=hub.hub_info[ATTR_HARDWARE_VERSION],
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
await hub.start()
@@ -47,6 +47,17 @@
}
}
},
"exceptions": {
"cannot_connect_manual": {
"message": "Unable to connect to Nobø Ecohub with serial {serial} at {ip}. If the hub has moved to a new IP address, remove and re-add the integration."
},
"cannot_connect_rediscovered": {
"message": "Unable to connect to Nobø Ecohub at rediscovered IP {ip}; will retry."
},
"hub_not_found": {
"message": "Nobø Ecohub with serial {serial} not found on the network. The hub may be offline or on a different subnet; will retry."
}
},
"options": {
"step": {
"init": {
@@ -114,7 +114,7 @@ MAX_TOOL_ITERATIONS = 10
def _adjust_schema(schema: dict[str, Any]) -> None:
"""Adjust the schema to be compatible with OpenAI API."""
"""Adjust the output schema to be compatible with OpenAI API."""
if schema["type"] == "object":
schema.setdefault("strict", True)
schema.setdefault("additionalProperties", False)
@@ -158,10 +158,15 @@ def _format_tool(
tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None
) -> FunctionToolParam:
"""Format tool specification."""
unsupported_keys = {"oneOf", "anyOf", "allOf", "enum", "not"}
schema = convert(tool.parameters, custom_serializer=custom_serializer)
if unsupported_keys.intersection(schema):
schema = {k: v for k, v in schema.items() if k not in unsupported_keys}
return FunctionToolParam(
type="function",
name=tool.name,
parameters=convert(tool.parameters, custom_serializer=custom_serializer),
parameters=schema,
description=tool.description,
strict=False,
)
@@ -8,7 +8,11 @@ from typing import Any
from powerfox import Powerfox, PowerfoxAuthenticationError, PowerfoxConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -38,26 +42,27 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN):
errors = {}
if user_input is not None:
self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
client = Powerfox(
username=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
error = await self._async_validate_credentials(
user_input[CONF_EMAIL], user_input[CONF_PASSWORD]
)
try:
await client.all_devices()
except PowerfoxAuthenticationError:
errors["base"] = "invalid_auth"
except PowerfoxConnectionError:
errors["base"] = "cannot_connect"
if error:
errors["base"] = error
elif self.source == SOURCE_RECONFIGURE:
reconfigure_entry = self._get_reconfigure_entry()
if reconfigure_entry.data[CONF_EMAIL] != user_input[CONF_EMAIL]:
self._async_abort_entries_match(
{CONF_EMAIL: user_input[CONF_EMAIL]}
)
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates=user_input
)
else:
self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
return self.async_create_entry(
title=user_input[CONF_EMAIL],
data={
CONF_EMAIL: user_input[CONF_EMAIL],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
data=user_input,
)
return self.async_show_form(
step_id="user",
errors=errors,
@@ -78,22 +83,17 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN):
reauth_entry = self._get_reauth_entry()
if user_input is not None:
client = Powerfox(
username=reauth_entry.data[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
error = await self._async_validate_credentials(
reauth_entry.data[CONF_EMAIL], user_input[CONF_PASSWORD]
)
try:
await client.all_devices()
except PowerfoxAuthenticationError:
errors["base"] = "invalid_auth"
except PowerfoxConnectionError:
errors["base"] = "cannot_connect"
if error:
errors["base"] = error
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates=user_input,
)
return self.async_show_form(
step_id="reauth_confirm",
description_placeholders={"email": reauth_entry.data[CONF_EMAIL]},
@@ -104,32 +104,22 @@ class PowerfoxConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Reconfigure Powerfox configuration."""
errors = {}
"""Handle reconfiguration."""
return await self.async_step_user()
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
client = Powerfox(
username=user_input[CONF_EMAIL],
password=user_input[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
)
try:
await client.all_devices()
except PowerfoxAuthenticationError:
errors["base"] = "invalid_auth"
except PowerfoxConnectionError:
errors["base"] = "cannot_connect"
else:
if reconfigure_entry.data[CONF_EMAIL] != user_input[CONF_EMAIL]:
self._async_abort_entries_match(
{CONF_EMAIL: user_input[CONF_EMAIL]}
)
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates=user_input
)
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
async def _async_validate_credentials(
self, email: str, password: str
) -> str | None:
"""Validate credentials and return error string or None if valid."""
client = Powerfox(
username=email,
password=password,
session=async_get_clientsession(self.hass),
)
try:
await client.all_devices()
except PowerfoxAuthenticationError:
return "invalid_auth"
except PowerfoxConnectionError:
return "cannot_connect"
return None
@@ -20,18 +20,6 @@
"description": "The password for {email} is no longer valid.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"reconfigure": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "[%key:component::powerfox::config::step::user::data_description::email%]",
"password": "[%key:component::powerfox::config::step::user::data_description::password%]"
},
"description": "Powerfox is already configured. Would you like to reconfigure it?",
"title": "Reconfigure your Powerfox account"
},
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
@@ -27,6 +27,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_SYSTEM_ID, DOMAIN
from .coordinator import PvOutputConfigEntry, PVOutputDataUpdateCoordinator
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class PVOutputSensorEntityDescription(SensorEntityDescription):
@@ -257,6 +257,7 @@ SENSOR_DESCRIPTIONS = [
RoborockSensorDescription(
key="mop_clean_remaining",
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.HOURS,
device_class=SensorDeviceClass.DURATION,
value_fn=lambda data: data.status.rdt,
translation_key="mop_drying_remaining_time",
@@ -336,7 +336,7 @@ def _async_get_roomba_discovery() -> RoombaDiscovery:
@callback
def _async_blid_from_hostname(hostname: str) -> str:
"""Extract the blid from the hostname."""
return hostname.split("-")[1].split(".")[0].upper()
return hostname.split("-")[1].split(".", maxsplit=1)[0].upper()
async def _async_discover_roombas(
@@ -4,7 +4,7 @@
"codeowners": ["@noahhusby"],
"documentation": "https://www.home-assistant.io/integrations/russound_rnet",
"iot_class": "local_polling",
"loggers": ["russound"],
"loggers": ["aiorussound"],
"quality_scale": "legacy",
"requirements": ["russound==0.2.0"]
"requirements": ["aiorussound==5.0.1"]
}
@@ -2,10 +2,16 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable, Coroutine
import contextlib
import logging
import math
from typing import Any
from russound import russound
from aiorussound import RussoundTcpConnectionHandler
from aiorussound.exceptions import CommandError
from aiorussound.rnet.client import RussoundRNETClient
import voluptuous as vol
from homeassistant.components.media_player import (
@@ -14,8 +20,14 @@ from homeassistant.components.media_player import (
MediaPlayerEntityFeature,
MediaPlayerState,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
@@ -25,6 +37,13 @@ _LOGGER = logging.getLogger(__name__)
CONF_ZONES = "zones"
CONF_SOURCES = "sources"
RNET_EXCEPTIONS = (
CommandError,
ConnectionRefusedError,
TimeoutError,
asyncio.IncompleteReadError,
OSError,
)
ZONE_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
@@ -40,33 +59,45 @@ PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
}
)
# Max volume level on RNET devices
_MAX_VOLUME = 50
def setup_platform(
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Russound RNET platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
host = config[CONF_HOST]
port = config[CONF_PORT]
if host is None or port is None:
_LOGGER.error("Invalid config. Expected %s and %s", CONF_HOST, CONF_PORT)
return
client = RussoundRNETClient(RussoundTcpConnectionHandler(host, port))
try:
await client.connect()
except RNET_EXCEPTIONS as err:
raise PlatformNotReady(
f"Could not connect to Russound RNET at {host}:{port}"
) from err
russ = russound.Russound(host, port)
russ.connect()
sources = [source[CONF_NAME] for source in config[CONF_SOURCES]]
lock = asyncio.Lock()
sources = [source["name"] for source in config[CONF_SOURCES]]
async def _async_disconnect(*_: Any) -> None:
"""Disconnect the RNET client on HA shutdown."""
with contextlib.suppress(*RNET_EXCEPTIONS):
await client.disconnect()
if russ.is_connected():
for zone_id, extra in config[CONF_ZONES].items():
add_entities(
[RussoundRNETDevice(hass, russ, sources, zone_id, extra)], True
)
else:
_LOGGER.error("Not connected to %s:%s", host, port)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_disconnect)
async_add_entities(
[
RussoundRNETDevice(client, lock, sources, zone_id, extra)
for zone_id, extra in config[CONF_ZONES].items()
],
True,
)
class RussoundRNETDevice(MediaPlayerEntity):
@@ -80,75 +111,123 @@ class RussoundRNETDevice(MediaPlayerEntity):
| MediaPlayerEntityFeature.SELECT_SOURCE
)
def __init__(self, hass, russ, sources, zone_id, extra):
def __init__(
self,
client: RussoundRNETClient,
lock: asyncio.Lock,
sources: list[str],
zone_id: int,
extra: dict[str, str],
) -> None:
"""Initialise the Russound RNET device."""
self._attr_name = extra["name"]
self._russ = russ
self._attr_name = extra[CONF_NAME]
self._client = client
self._lock = lock
self._attr_source_list = sources
# Each controller has a maximum of 6 zones, every increment of 6 zones
# maps to an additional controller for easier backward compatibility
self._controller_id = str(math.ceil(zone_id / 6))
# Each zone resets to 1-6 per controller
self._controller_id = math.ceil(zone_id / 6)
self._zone_id = (zone_id - 1) % 6 + 1
def update(self) -> None:
async def _async_ensure_connected(self) -> None:
"""Ensure the client is connected, reconnecting if needed."""
if not self._client.is_connected:
_LOGGER.debug("Reconnecting RNET client")
await self._client.connect()
async def _async_run_with_retry(
self, command: Callable[[], Coroutine[Any, Any, Any]]
) -> None:
"""Run a command with reconnect retry on failure."""
async with self._lock:
try:
await self._async_ensure_connected()
await command()
except RNET_EXCEPTIONS:
with contextlib.suppress(*RNET_EXCEPTIONS):
await self._client.disconnect()
try:
await self._async_ensure_connected()
await command()
except RNET_EXCEPTIONS:
_LOGGER.error(
"Command failed for zone %s on controller %s after retry",
self._zone_id,
self._controller_id,
)
async def async_update(self) -> None:
"""Retrieve latest state."""
# Updated this function to make a single call to get_zone_info, so that
# with a single call we can get On/Off, Volume and Source, reducing the
# amount of traffic and speeding up the update process.
try:
ret = self._russ.get_zone_info(self._controller_id, self._zone_id, 4)
except BrokenPipeError:
_LOGGER.error("Broken Pipe Error, trying to reconnect to Russound RNET")
self._russ.connect()
ret = self._russ.get_zone_info(self._controller_id, self._zone_id, 4)
async with self._lock:
try:
await self._async_ensure_connected()
info = await self._client.get_all_zone_info(
self._controller_id, self._zone_id
)
except RNET_EXCEPTIONS:
with contextlib.suppress(*RNET_EXCEPTIONS):
await self._client.disconnect()
try:
await self._async_ensure_connected()
info = await self._client.get_all_zone_info(
self._controller_id, self._zone_id
)
except RNET_EXCEPTIONS:
_LOGGER.error(
"Could not update zone %s on controller %s",
self._zone_id,
self._controller_id,
)
self._attr_available = False
return
_LOGGER.debug("ret= %s", ret)
if ret is not None:
_LOGGER.debug(
"Updating status for RNET zone %s on controller %s",
self._zone_id,
self._controller_id,
self._attr_available = True
self._attr_state = MediaPlayerState.ON if info.power else MediaPlayerState.OFF
self._attr_volume_level = info.volume / _MAX_VOLUME
# info.source is 1-based; source_list is 0-based
index = info.source - 1
if self.source_list and 0 <= index < len(self.source_list):
self._attr_source = self.source_list[index]
async def async_set_volume_level(self, volume: float) -> None:
"""Set volume level. Volume has a range (0..1)."""
device_volume = max(0, min(_MAX_VOLUME, int(volume * _MAX_VOLUME)))
await self._async_run_with_retry(
lambda: self._client.set_volume(
self._controller_id, self._zone_id, device_volume
)
if ret[0] == 0:
self._attr_state = MediaPlayerState.OFF
else:
self._attr_state = MediaPlayerState.ON
self._attr_volume_level = ret[2] * 2 / 100.0
# Returns 0 based index for source.
index = ret[1]
# Possibility exists that user has defined list of all sources.
# If a source is set externally that is beyond the defined list then
# an exception will be thrown.
# In this case return and unknown source (None)
if self.source_list and 0 <= index < len(self.source_list):
self._attr_source = self.source_list[index]
else:
_LOGGER.error("Could not update status for zone %s", self._zone_id)
)
def set_volume_level(self, volume: float) -> None:
"""Set volume level. Volume has a range (0..1).
Translate this to a range of (0..100) as expected
by _russ.set_volume()
"""
self._russ.set_volume(self._controller_id, self._zone_id, volume * 100)
def turn_on(self) -> None:
async def async_turn_on(self) -> None:
"""Turn the media player on."""
self._russ.set_power(self._controller_id, self._zone_id, "1")
await self._async_run_with_retry(
lambda: self._client.set_zone_power(
self._controller_id, self._zone_id, True
)
)
def turn_off(self) -> None:
async def async_turn_off(self) -> None:
"""Turn off media player."""
self._russ.set_power(self._controller_id, self._zone_id, "0")
await self._async_run_with_retry(
lambda: self._client.set_zone_power(
self._controller_id, self._zone_id, False
)
)
def mute_volume(self, mute: bool) -> None:
async def async_mute_volume(self, mute: bool) -> None:
"""Send mute command."""
self._russ.toggle_mute(self._controller_id, self._zone_id)
def select_source(self, source: str) -> None:
async def _mute_if_needed() -> None:
if self.is_volume_muted != mute:
await self._client.toggle_mute(self._controller_id, self._zone_id)
await self._async_run_with_retry(_mute_if_needed)
async def async_select_source(self, source: str) -> None:
"""Set the input source."""
if self.source_list and source in self.source_list:
index = self.source_list.index(source)
# 0 based value for source
self._russ.set_source(self._controller_id, self._zone_id, index)
# source_list is 0-based; RNET source is 1-based
index = self.source_list.index(source) + 1
await self._async_run_with_retry(
lambda: self._client.select_source(
self._controller_id, self._zone_id, index
)
)
@@ -9,10 +9,7 @@ rules:
common-modules: todo
config-flow-test-coverage: todo
config-flow: todo
dependency-transparency:
status: todo
comment: |
CI pipeline for publishing is not on GH repo.
dependency-transparency: done
docs-actions:
status: exempt
comment: |
@@ -87,7 +84,7 @@ rules:
This integration is not a hub and only represents a single device.
# Platinum
async-dependency: todo
async-dependency: done
inject-websession:
status: exempt
comment: |
@@ -4,5 +4,5 @@
"codeowners": ["@fabaff"],
"documentation": "https://www.home-assistant.io/integrations/serial",
"iot_class": "local_polling",
"requirements": ["serialx==1.2.2"]
"requirements": ["serialx==1.4.1"]
}
+3 -11
View File
@@ -3,7 +3,6 @@
from __future__ import annotations
import asyncio
from typing import TYPE_CHECKING
from sfrbox_api.bridge import SFRBox
from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError
@@ -14,14 +13,13 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN, PLATFORMS, PLATFORMS_WITH_AUTH
from .const import DOMAIN, PLATFORMS
from .coordinator import SFRConfigEntry, SFRDataUpdateCoordinator, SFRRuntimeData
async def async_setup_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
"""Set up SFR box as config entry."""
box = SFRBox(ip=entry.data[CONF_HOST], client=async_get_clientsession(hass))
platforms = PLATFORMS
has_auth = False
if (username := entry.data.get(CONF_USERNAME)) and (
password := entry.data.get(CONF_PASSWORD)
@@ -39,11 +37,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
translation_key="unknown_error",
translation_placeholders={"error": str(err)},
) from err
platforms = PLATFORMS_WITH_AUTH
has_auth = True
data = SFRRuntimeData(
box=box,
has_authentication=has_auth,
dsl=SFRDataUpdateCoordinator(
hass, entry, box, "dsl", lambda b: b.dsl_get_info()
),
@@ -65,8 +63,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
# Preload system information
await data.system.async_config_entry_first_refresh()
system_info = data.system.data
if TYPE_CHECKING:
assert system_info is not None
# Preload other coordinators (based on net infrastructure)
tasks = [data.wan.async_config_entry_first_refresh()]
@@ -91,15 +87,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
)
entry.runtime_data = data
await hass.config_entries.async_forward_entry_setups(entry, platforms)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: SFRConfigEntry) -> bool:
"""Unload a config entry."""
if entry.data.get(CONF_USERNAME) and entry.data.get(CONF_PASSWORD):
return await hass.config_entries.async_unload_platforms(
entry, PLATFORMS_WITH_AUTH
)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING
from sfrbox_api.models import DslInfo, FtthInfo, VoipInfo, WanInfo
@@ -88,8 +87,6 @@ async def async_setup_entry(
"""Set up the sensors."""
data = entry.runtime_data
system_info = data.system.data
if TYPE_CHECKING:
assert system_info is not None
entities: list[SFRBoxBinarySensor] = [
SFRBoxBinarySensor(data.wan, description, system_info)
+5 -3
View File
@@ -5,7 +5,7 @@ from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine
from dataclasses import dataclass
from functools import wraps
from typing import TYPE_CHECKING, Any, Concatenate
from typing import Any, Concatenate
from sfrbox_api.bridge import SFRBox
from sfrbox_api.exceptions import SFRBoxError
@@ -78,9 +78,11 @@ async def async_setup_entry(
) -> None:
"""Set up the buttons."""
data = entry.runtime_data
if not data.has_authentication:
# All buttons currently require authentication
return
system_info = data.system.data
if TYPE_CHECKING:
assert system_info is not None
entities = [
SFRBoxButton(data.box, description, system_info) for description in BUTTON_TYPES
+1 -2
View File
@@ -7,5 +7,4 @@ DEFAULT_USERNAME = "admin"
DOMAIN = "sfr_box"
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
PLATFORMS_WITH_AUTH = [*PLATFORMS, Platform.BUTTON]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR]
@@ -29,6 +29,7 @@ class SFRRuntimeData:
"""Runtime data for SFR Box."""
box: SFRBox
has_authentication: bool
dsl: SFRDataUpdateCoordinator[DslInfo]
ftth: SFRDataUpdateCoordinator[FtthInfo]
system: SFRDataUpdateCoordinator[SystemInfo]
@@ -2,7 +2,6 @@
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING
from sfrbox_api.models import DslInfo, SystemInfo, VoipInfo, WanInfo
@@ -236,8 +235,6 @@ async def async_setup_entry(
"""Set up the sensors."""
data = entry.runtime_data
system_info = data.system.data
if TYPE_CHECKING:
assert system_info is not None
entities: list[SFRBoxSensor] = [
SFRBoxSensor(data.system, description, system_info)
@@ -7,5 +7,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["PyTurboJPEG==2.2.0", "av==16.0.1", "numpy==2.3.2"]
"requirements": ["PyTurboJPEG==1.8.3", "av==16.0.1", "numpy==2.3.2"]
}
+5 -10
View File
@@ -2,30 +2,25 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .coordinator import TailscaleDataUpdateCoordinator
from .coordinator import TailscaleConfigEntry, TailscaleDataUpdateCoordinator
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: TailscaleConfigEntry) -> bool:
"""Set up Tailscale from a config entry."""
coordinator = TailscaleDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: TailscaleConfigEntry) -> bool:
"""Unload Tailscale config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
del hass.data[DOMAIN][entry.entry_id]
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -12,14 +12,15 @@ from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import TailscaleConfigEntry
from .entity import TailscaleEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class TailscaleBinarySensorEntityDescription(BinarySensorEntityDescription):
@@ -97,11 +98,11 @@ BINARY_SENSORS: tuple[TailscaleBinarySensorEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: TailscaleConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up a Tailscale binary sensors based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
coordinator = entry.runtime_data
async_add_entities(
TailscaleBinarySensorEntity(
coordinator=coordinator,
@@ -14,13 +14,15 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_TAILNET, DOMAIN, LOGGER, SCAN_INTERVAL
type TailscaleConfigEntry = ConfigEntry[TailscaleDataUpdateCoordinator]
class TailscaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Device]]):
"""The Tailscale Data Update Coordinator."""
config_entry: ConfigEntry
config_entry: TailscaleConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
def __init__(self, hass: HomeAssistant, config_entry: TailscaleConfigEntry) -> None:
"""Initialize the Tailscale coordinator."""
session = async_get_clientsession(hass)
self.tailscale = Tailscale(
@@ -6,12 +6,11 @@ import json
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from .const import CONF_TAILNET, DOMAIN
from .coordinator import TailscaleDataUpdateCoordinator
from .const import CONF_TAILNET
from .coordinator import TailscaleConfigEntry
TO_REDACT = {
CONF_API_KEY,
@@ -22,16 +21,19 @@ TO_REDACT = {
"hostname",
"machine_key",
"name",
"node_id",
"node_key",
"tailnet_lock_key",
"user",
}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
hass: HomeAssistant, entry: TailscaleConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: TailscaleDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
# Round-trip via JSON to trigger serialization
devices = [json.loads(device.to_json()) for device in coordinator.data.values()]
devices = [
json.loads(device.to_json()) for device in entry.runtime_data.data.values()
]
return async_redact_data({"devices": devices}, TO_REDACT)
+4 -6
View File
@@ -6,15 +6,13 @@ from tailscale import Device as TailscaleDevice
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import TailscaleDataUpdateCoordinator
class TailscaleEntity(CoordinatorEntity):
class TailscaleEntity(CoordinatorEntity[TailscaleDataUpdateCoordinator]):
"""Defines a Tailscale base entity."""
_attr_has_entity_name = True
@@ -22,7 +20,7 @@ class TailscaleEntity(CoordinatorEntity):
def __init__(
self,
*,
coordinator: DataUpdateCoordinator,
coordinator: TailscaleDataUpdateCoordinator,
device: TailscaleDevice,
description: EntityDescription,
) -> None:
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/tailscale",
"integration_type": "hub",
"iot_class": "cloud_polling",
"requirements": ["tailscale==0.6.2"]
"requirements": ["tailscale==0.7.0"]
}
+5 -4
View File
@@ -13,14 +13,15 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import TailscaleConfigEntry
from .entity import TailscaleEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class TailscaleSensorEntityDescription(SensorEntityDescription):
@@ -54,11 +55,11 @@ SENSORS: tuple[TailscaleSensorEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: TailscaleConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up a Tailscale sensors based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
coordinator = entry.runtime_data
async_add_entities(
TailscaleSensorEntity(
coordinator=coordinator,
+6 -13
View File
@@ -4,18 +4,17 @@ from __future__ import annotations
from Tami4EdgeAPI import Tami4EdgeAPI, exceptions
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from .const import API, CONF_REFRESH_TOKEN, COORDINATOR, DOMAIN
from .coordinator import Tami4EdgeCoordinator
from .const import CONF_REFRESH_TOKEN
from .coordinator import Tami4ConfigEntry, Tami4EdgeCoordinator
PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: Tami4ConfigEntry) -> bool:
"""Set up tami4 from a config entry."""
refresh_token = entry.data.get(CONF_REFRESH_TOKEN)
@@ -29,19 +28,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
coordinator = Tami4EdgeCoordinator(hass, entry, api)
await coordinator.async_config_entry_first_refresh()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
API: api,
COORDINATOR: coordinator,
}
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: Tami4ConfigEntry) -> bool:
"""Unload a config entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+3 -4
View File
@@ -8,12 +8,11 @@ from Tami4EdgeAPI import Tami4EdgeAPI
from Tami4EdgeAPI.drink import Drink
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import API, DOMAIN
from .coordinator import Tami4ConfigEntry
from .entity import Tami4EdgeBaseEntity
_LOGGER = logging.getLogger(__name__)
@@ -42,12 +41,12 @@ BOIL_WATER_BUTTON = Tami4EdgeButtonEntityDescription(
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: Tami4ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Perform the setup for Tami4Edge."""
api: Tami4EdgeAPI = hass.data[DOMAIN][entry.entry_id][API]
api = entry.runtime_data.api
buttons: list[Tami4EdgeBaseEntity] = [Tami4EdgeButton(api, BOIL_WATER_BUTTON)]
device = await hass.async_add_executor_job(api.get_device)
-2
View File
@@ -3,5 +3,3 @@
DOMAIN = "tami4"
CONF_PHONE = "phone"
CONF_REFRESH_TOKEN = "refresh_token"
API = "api"
COORDINATOR = "coordinator"
@@ -13,6 +13,8 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
_LOGGER = logging.getLogger(__name__)
type Tami4ConfigEntry = ConfigEntry[Tami4EdgeCoordinator]
@dataclass
class FlattenedWaterQuality:
@@ -37,10 +39,10 @@ class FlattenedWaterQuality:
class Tami4EdgeCoordinator(DataUpdateCoordinator[FlattenedWaterQuality]):
"""Tami4Edge water quality coordinator."""
config_entry: ConfigEntry
config_entry: Tami4ConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, api: Tami4EdgeAPI
self, hass: HomeAssistant, config_entry: Tami4ConfigEntry, api: Tami4EdgeAPI
) -> None:
"""Initialize the water quality coordinator."""
super().__init__(
@@ -50,12 +52,12 @@ class Tami4EdgeCoordinator(DataUpdateCoordinator[FlattenedWaterQuality]):
name="Tami4Edge water quality coordinator",
update_interval=timedelta(minutes=60),
)
self._api = api
self.api = api
async def _async_update_data(self) -> FlattenedWaterQuality:
"""Fetch data from the API endpoint."""
try:
device = await self.hass.async_add_executor_job(self._api.get_device)
device = await self.hass.async_add_executor_job(self.api.get_device)
return FlattenedWaterQuality(device.water_quality)
except exceptions.APIRequestFailedException as ex:

Some files were not shown because too many files have changed in this diff Show More