Mirror of https://github.com/home-assistant/core.git (synced 2025-11-05 08:59:57 +00:00)

Compare commits: 2025.10.1...select-sel (106 commits)
| SHA1 |
|---|
| a65458edf8 |
| 10b56e4258 |
| 0ff2597957 |
| 026b28e962 |
| 9a1e67294a |
| cdb448a5cc |
| ab80e726e2 |
| 2d5d0f67b2 |
| d4100b6096 |
| 955e854d77 |
| 0c37f88c49 |
| 48167eeb9c |
| 24177197f7 |
| 863fc0ba97 |
| 9f7b229d02 |
| ffd909f3d9 |
| 1ebf096a33 |
| 96d51965e5 |
| 04b510b020 |
| c9a301d50e |
| b304bd1a8b |
| b99525b231 |
| 634db13990 |
| ad51a77989 |
| 3348a39e8a |
| 81c2e356ec |
| de6c3512d2 |
| 36dc1e938a |
| 07a78cf6f7 |
| eaa673e0c3 |
| f2c4ca081f |
| e3d707f0b4 |
| fb93fed2e5 |
| 95dfc2f23d |
| 408df2093a |
| f32bf0cc3e |
| dbbe3145b6 |
| f8bf3ea2ef |
| 053bd31d43 |
| 1aefc3f37a |
| 3de955d9ce |
| 0ff88fd366 |
| eb84020773 |
| 4bbfea3c7c |
| 63d4fb7558 |
| 953895cd81 |
| a6c3f4efc0 |
| 11e880d034 |
| e4d6bdb398 |
| 6ced1783e3 |
| 8051f78d10 |
| b724176b23 |
| fdca16ea92 |
| f8fd8b432a |
| 9148ae70ce |
| 447cb26d28 |
| 2af36465f6 |
| d5f7265424 |
| cc16af7f2d |
| 7a4d75bc44 |
| ec0380fd3b |
| b17cc71dfb |
| 89b327ed7b |
| 9bf361a1b8 |
| d11c171c75 |
| c523c45d17 |
| c1b9c0e1b6 |
| 487b9ff03e |
| ec62b0cdfb |
| 6d0470064f |
| 7450b3fd1a |
| 5b70910d77 |
| 52de5ff5ff |
| c4389a1679 |
| 35faaa6cae |
| 3c0b13975a |
| bc88696339 |
| 8f99c3f64a |
| 88016d96d4 |
| 47df73b18f |
| 1c12d2b8cd |
| eb38837a8c |
| 159c7fbfd1 |
| 7ee31f0884 |
| 0c5e12571a |
| 9db973217f |
| cf1a745283 |
| 834e3f1963 |
| 3f8f7573c9 |
| 0ae272f1f6 |
| 8774295e2e |
| 0c8d2594ef |
| 205bd2676b |
| 25849fd9cc |
| 7d6eac9ff7 |
| 31017ebc98 |
| 724a7b0ecc |
| 91e13d447a |
| 7c8ad9d535 |
| 9cd3ab853d |
| 0b0f8c5829 |
| ae7bc7fb1b |
| 09750872b5 |
| 076e51017b |
| 95e7b00996 |
| ddecf1ac21 |
62 .github/workflows/ci.yaml (vendored)
@@ -40,7 +40,7 @@ env:
|
||||
CACHE_VERSION: 8
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2025.10"
|
||||
HA_SHORT_VERSION: "2025.11"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13']"
|
||||
# 10.3 is the oldest supported version
|
||||
@@ -263,7 +263,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -279,7 +279,7 @@ jobs:
|
||||
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
lookup-only: true
|
||||
@@ -309,7 +309,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -318,7 +318,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -349,7 +349,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -358,7 +358,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -389,7 +389,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -398,7 +398,7 @@ jobs:
|
||||
needs.info.outputs.pre-commit_cache_key }}
|
||||
- name: Restore pre-commit environment from cache
|
||||
id: cache-precommit
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
fail-on-cache-miss: true
|
||||
@@ -505,7 +505,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
@@ -513,7 +513,7 @@ jobs:
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore uv wheel cache
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.UV_CACHE_DIR }}
|
||||
key: >-
|
||||
@@ -525,7 +525,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-
|
||||
- name: Check if apt cache exists
|
||||
id: cache-apt-check
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
lookup-only: ${{ steps.cache-venv.outputs.cache-hit == 'true' }}
|
||||
path: |
|
||||
@@ -570,7 +570,7 @@ jobs:
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -622,7 +622,7 @@ jobs:
|
||||
- base
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -651,7 +651,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -684,7 +684,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -741,7 +741,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -784,7 +784,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -831,7 +831,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -883,7 +883,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -891,7 +891,7 @@ jobs:
|
||||
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.info.outputs.python_cache_key }}
|
||||
- name: Restore mypy cache
|
||||
uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: .mypy_cache
|
||||
key: >-
|
||||
@@ -935,7 +935,7 @@ jobs:
|
||||
name: Split tests for full run
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -967,7 +967,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1009,7 +1009,7 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1042,7 +1042,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1156,7 +1156,7 @@ jobs:
|
||||
Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1189,7 +1189,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1310,7 +1310,7 @@ jobs:
|
||||
Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1345,7 +1345,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
@@ -1485,7 +1485,7 @@ jobs:
|
||||
Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
|
||||
steps:
|
||||
- name: Restore apt cache
|
||||
uses: actions/cache/restore@v4.2.4
|
||||
uses: actions/cache/restore@v4.3.0
|
||||
with:
|
||||
path: |
|
||||
${{ env.APT_CACHE_DIR }}
|
||||
@@ -1518,7 +1518,7 @@ jobs:
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: venv
|
||||
fail-on-cache-miss: true
|
||||
|
||||
4 .github/workflows/codeql.yml (vendored)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/init@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
uses: github/codeql-action/analyze@303c0aef88fc2fe5ff6d63d3b1596bfd83dfa1f9 # v3.30.4
with:
category: "/language:python"
10 build.yaml
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
@@ -1,5 +0,0 @@
{
"domain": "eltako",
"name": "Eltako",
"iot_standards": ["matter"]
}

@@ -1,5 +0,0 @@
{
"domain": "konnected",
"name": "Konnected",
"integrations": ["konnected", "konnected_esphome"]
}

@@ -1,5 +0,0 @@
{
"domain": "level",
"name": "Level",
"iot_standards": ["matter"]
}
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -14,7 +15,7 @@ from airos.exceptions import (
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
@@ -24,6 +25,11 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
|
||||
from .coordinator import AirOS8
|
||||
@@ -54,50 +60,107 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self.airos_device: AirOS8
|
||||
self.errors: dict[str, str] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self,
|
||||
user_input: dict[str, Any] | None = None,
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
"""Handle the manual input of host and credentials."""
|
||||
self.errors = {}
|
||||
if user_input is not None:
|
||||
# By default airOS 8 comes with self-signed SSL certificates,
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(
|
||||
self.hass,
|
||||
verify_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
|
||||
)
|
||||
|
||||
airos_device = AirOS8(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=session,
|
||||
use_ssl=user_input[SECTION_ADVANCED_SETTINGS][CONF_SSL],
|
||||
)
|
||||
try:
|
||||
await airos_device.login()
|
||||
airos_data = await airos_device.status()
|
||||
|
||||
except (
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDeviceConnectionError,
|
||||
):
|
||||
errors["base"] = "cannot_connect"
|
||||
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
|
||||
errors["base"] = "invalid_auth"
|
||||
except AirOSKeyDataMissingError:
|
||||
errors["base"] = "key_data_missing"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(airos_data.derived.mac)
|
||||
self._abort_if_unique_id_configured()
|
||||
validated_info = await self._validate_and_get_device_info(user_input)
|
||||
if validated_info:
|
||||
return self.async_create_entry(
|
||||
title=airos_data.host.hostname, data=user_input
|
||||
title=validated_info["title"],
|
||||
data=validated_info["data"],
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
|
||||
)
|
||||
|
||||
async def _validate_and_get_device_info(
|
||||
self, config_data: dict[str, Any]
|
||||
) -> dict[str, Any] | None:
|
||||
"""Validate user input with the device API."""
|
||||
# By default airOS 8 comes with self-signed SSL certificates,
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(
|
||||
self.hass,
|
||||
verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
|
||||
)
|
||||
|
||||
airos_device = AirOS8(
|
||||
host=config_data[CONF_HOST],
|
||||
username=config_data[CONF_USERNAME],
|
||||
password=config_data[CONF_PASSWORD],
|
||||
session=session,
|
||||
use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
|
||||
)
|
||||
try:
|
||||
await airos_device.login()
|
||||
airos_data = await airos_device.status()
|
||||
|
||||
except (
|
||||
AirOSConnectionSetupError,
|
||||
AirOSDeviceConnectionError,
|
||||
):
|
||||
self.errors["base"] = "cannot_connect"
|
||||
except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
|
||||
self.errors["base"] = "invalid_auth"
|
||||
except AirOSKeyDataMissingError:
|
||||
self.errors["base"] = "key_data_missing"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception during credential validation")
|
||||
self.errors["base"] = "unknown"
|
||||
else:
|
||||
await self.async_set_unique_id(airos_data.derived.mac)
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
else:
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
return {"title": airos_data.host.hostname, "data": config_data}
|
||||
|
||||
return None
|
||||
|
||||
async def async_step_reauth(
|
||||
self,
|
||||
user_input: Mapping[str, Any],
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauthentication upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm(user_input)
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self,
|
||||
user_input: Mapping[str, Any],
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauthentication upon an API authentication error."""
|
||||
self.errors = {}
|
||||
|
||||
if user_input:
|
||||
validate_data = {**self._get_reauth_entry().data, **user_input}
|
||||
if await self._validate_and_get_device_info(config_data=validate_data):
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
data_updates=validate_data,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): TextSelector(
|
||||
TextSelectorConfig(
|
||||
type=TextSelectorType.PASSWORD,
|
||||
autocomplete="current-password",
|
||||
)
|
||||
),
|
||||
}
|
||||
),
|
||||
errors=self.errors,
|
||||
)
|
||||
|
||||
@@ -14,7 +14,7 @@ from airos.exceptions import (

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, SCAN_INTERVAL

@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
try:
await self.airos_device.login()
return await self.airos_device.status()
except (AirOSConnectionAuthenticationError,) as err:
except AirOSConnectionAuthenticationError as err:
_LOGGER.exception("Error authenticating with airOS device")
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="invalid_auth"
) from err
except (
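One side of this comparison raises ConfigEntryAuthFailed instead of ConfigEntryError in the airos coordinator. In Home Assistant, raising ConfigEntryAuthFailed from a coordinator refresh marks the config entry for re-authentication (which is what makes the reauth_confirm step added to the config flow reachable) rather than failing the refresh outright. A minimal sketch of that pattern, assuming the exception and import names shown in the diff; the ExampleCoordinator class, the generic client object, and the 60-second interval are illustrative only, not part of the actual integration:

```python
"""Minimal sketch: trigger a reauth flow from a DataUpdateCoordinator."""
from datetime import timedelta
import logging

from airos.exceptions import AirOSConnectionAuthenticationError  # name taken from the diff

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator):
    """Illustrative coordinator; the client object and its login()/status() API are assumptions."""

    def __init__(self, hass: HomeAssistant, client) -> None:
        super().__init__(
            hass, _LOGGER, name="example", update_interval=timedelta(seconds=60)
        )
        self.client = client

    async def _async_update_data(self):
        try:
            await self.client.login()
            return await self.client.status()
        except AirOSConnectionAuthenticationError as err:
            # ConfigEntryAuthFailed flags the config entry for re-authentication,
            # so Home Assistant prompts the user instead of silently retrying.
            raise ConfigEntryAuthFailed("Invalid credentials") from err
        except Exception as err:
            # Any other failure is a normal update error; the coordinator retries later.
            raise UpdateFailed(f"Error communicating with device: {err}") from err
```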
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.4"]
"requirements": ["airos==0.5.3"]
}
@@ -2,6 +2,14 @@
"config": {
"flow_title": "Ubiquiti airOS device",
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::airos::config::step::user::data_description::password%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]",
@@ -34,7 +42,9 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
}
},
"entity": {
@@ -2,14 +2,17 @@
|
||||
|
||||
from airtouch4pyapi import AirTouch
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
|
||||
from .coordinator import AirtouchDataUpdateCoordinator
|
||||
|
||||
PLATFORMS = [Platform.CLIMATE]
|
||||
|
||||
type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
|
||||
"""Set up AirTouch4 from a config entry."""
|
||||
@@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
|
||||
info = airtouch.GetAcs()
|
||||
if not info:
|
||||
raise ConfigEntryNotReady
|
||||
coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
|
||||
coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
|
||||
@@ -2,34 +2,26 @@
|
||||
|
||||
import logging
|
||||
|
||||
from airtouch4pyapi import AirTouch
|
||||
from airtouch4pyapi.airtouch import AirTouchStatus
|
||||
|
||||
from homeassistant.components.climate import SCAN_INTERVAL
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Class to manage fetching Airtouch data."""
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
|
||||
) -> None:
|
||||
def __init__(self, hass, airtouch):
|
||||
"""Initialize global Airtouch data updater."""
|
||||
self.airtouch = airtouch
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
@@ -22,17 +22,6 @@ class OAuth2FlowHandler(
|
||||
VERSION = CONFIG_FLOW_VERSION
|
||||
MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Check we have the cloud integration set up."""
|
||||
if "cloud" not in self.hass.config.components:
|
||||
return self.async_abort(
|
||||
reason="cloud_not_enabled",
|
||||
description_placeholders={"default_config": "default_config"},
|
||||
)
|
||||
return await super().async_step_user(user_input)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, user_input: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -24,8 +24,7 @@
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
|
||||
"cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
|
||||
"wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
|
||||
},
|
||||
"create_entry": {
|
||||
"default": "[%key:common::config_flow::create_entry::authenticated%]"
|
||||
|
||||
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.7"]
"requirements": ["aioamazondevices==6.2.6"]
}
@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
|
||||
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return detailed information about entities and devices."""
|
||||
dev_reg = dr.async_get(hass)
|
||||
ent_reg = er.async_get(hass)
|
||||
@@ -513,8 +513,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
|
||||
integration_configs: dict[str, AnalyticsModifications] = {}
|
||||
|
||||
removed_devices: set[str] = set()
|
||||
|
||||
# Get device list
|
||||
for device_entry in dev_reg.devices.values():
|
||||
if not device_entry.primary_config_entry:
|
||||
@@ -527,10 +525,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
if config_entry is None:
|
||||
continue
|
||||
|
||||
if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
|
||||
removed_devices.add(device_entry.id)
|
||||
continue
|
||||
|
||||
integration_domain = config_entry.domain
|
||||
|
||||
integration_input = integration_inputs.setdefault(integration_domain, ([], []))
|
||||
@@ -557,7 +551,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
for domain, integration_info in integration_inputs.items()
|
||||
if (integration := integrations.get(domain)) is not None
|
||||
and integration.is_built_in
|
||||
and integration.manifest.get("integration_type") in ("device", "hub")
|
||||
and integration.integration_type in ("device", "hub")
|
||||
}
|
||||
|
||||
# Call integrations that implement the analytics platform
|
||||
@@ -620,12 +614,11 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
device_config = integration_config.devices.get(device_id, device_config)
|
||||
|
||||
if device_config.remove:
|
||||
removed_devices.add(device_id)
|
||||
continue
|
||||
|
||||
device_entry = dev_reg.devices[device_id]
|
||||
|
||||
device_id_mapping[device_id] = (integration_domain, len(devices_info))
|
||||
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
|
||||
|
||||
devices_info.append(
|
||||
{
|
||||
@@ -676,7 +669,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
|
||||
entity_entry = ent_reg.entities[entity_id]
|
||||
|
||||
entity_state = hass.states.get(entity_id)
|
||||
entity_state = hass.states.get(entity_entry.entity_id)
|
||||
|
||||
entity_info = {
|
||||
# LIMITATION: `assumed_state` can be overridden by users;
|
||||
@@ -697,19 +690,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
|
||||
"unit_of_measurement": entity_entry.unit_of_measurement,
|
||||
}
|
||||
|
||||
if (device_id_ := entity_entry.device_id) is not None:
|
||||
if device_id_ in removed_devices:
|
||||
# The device was removed, so we remove the entity too
|
||||
continue
|
||||
|
||||
if (
|
||||
new_device_id := device_id_mapping.get(device_id_)
|
||||
) is not None and (new_device_id[0] == integration_domain):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
continue
|
||||
|
||||
entities_info.append(entity_info)
|
||||
if (
|
||||
((device_id_ := entity_entry.device_id) is not None)
|
||||
and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
|
||||
and (new_device_id[0] == integration_domain)
|
||||
):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
else:
|
||||
entities_info.append(entity_info)
|
||||
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
|
||||
@@ -3,16 +3,12 @@ beolink_allstandby:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_expand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
all_discovered:
|
||||
required: false
|
||||
@@ -37,8 +33,6 @@ beolink_join:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
@@ -71,16 +65,12 @@ beolink_leave:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_unexpand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
|
||||
@@ -272,13 +272,6 @@ async def async_setup_entry(
|
||||
observations: list[ConfigType] = [
|
||||
dict(subentry.data) for subentry in config_entry.subentries.values()
|
||||
]
|
||||
|
||||
for observation in observations:
|
||||
if observation[CONF_PLATFORM] == CONF_TEMPLATE:
|
||||
observation[CONF_VALUE_TEMPLATE] = Template(
|
||||
observation[CONF_VALUE_TEMPLATE], hass
|
||||
)
|
||||
|
||||
prior: float = config[CONF_PRIOR]
|
||||
probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
|
||||
device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.1.1"],
"requirements": ["hass-nabucasa==1.1.2"],
"single_config_entry": true
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
"requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
}
@@ -32,6 +32,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_SOURCE: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_handle_source_entity_changes(
|
||||
@@ -46,15 +47,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, (Platform.SENSOR,))
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, (Platform.SENSOR,))
|
||||
|
||||
@@ -140,6 +140,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 4
|
||||
|
||||
@@ -6,12 +6,13 @@ from typing import TYPE_CHECKING, Any, Protocol
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_DOMAIN
|
||||
from homeassistant.const import CONF_DOMAIN, CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
ConditionCheckerType,
|
||||
ConditionConfig,
|
||||
trace_condition_function,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -55,19 +56,40 @@ class DeviceAutomationConditionProtocol(Protocol):
|
||||
class DeviceCondition(Condition):
|
||||
"""Device condition."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConfigType) -> None:
|
||||
"""Initialize condition."""
|
||||
self._config = config
|
||||
self._hass = hass
|
||||
_hass: HomeAssistant
|
||||
_config: ConfigType
|
||||
|
||||
@classmethod
|
||||
async def async_validate_complete_config(
|
||||
cls, hass: HomeAssistant, complete_config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate complete config."""
|
||||
complete_config = await async_validate_device_automation_config(
|
||||
hass,
|
||||
complete_config,
|
||||
cv.DEVICE_CONDITION_SCHEMA,
|
||||
DeviceAutomationType.CONDITION,
|
||||
)
|
||||
# Since we don't want to migrate device conditions to a new format
|
||||
# we just pass the entire config as options.
|
||||
complete_config[CONF_OPTIONS] = complete_config.copy()
|
||||
return complete_config
|
||||
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate device condition config."""
|
||||
return await async_validate_device_automation_config(
|
||||
hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
|
||||
)
|
||||
"""Validate config.
|
||||
|
||||
This is here just to satisfy the abstract class interface. It is never called.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
|
||||
"""Initialize condition."""
|
||||
self._hass = hass
|
||||
assert config.options is not None
|
||||
self._config = config.options
|
||||
|
||||
async def async_get_checker(self) -> condition.ConditionCheckerType:
|
||||
"""Test a device condition."""
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
from ipaddress import IPv4Address, IPv6Address
|
||||
import logging
|
||||
@@ -89,8 +88,8 @@ class WanIpSensor(SensorEntity):
|
||||
self._attr_name = "IPv6" if ipv6 else None
|
||||
self._attr_unique_id = f"{hostname}_{ipv6}"
|
||||
self.hostname = hostname
|
||||
self.port = port
|
||||
self._resolver = resolver
|
||||
self.resolver = aiodns.DNSResolver(tcp_port=port, udp_port=port)
|
||||
self.resolver.nameservers = [resolver]
|
||||
self.querytype: Literal["A", "AAAA"] = "AAAA" if ipv6 else "A"
|
||||
self._retries = DEFAULT_RETRIES
|
||||
self._attr_extra_state_attributes = {
|
||||
@@ -104,26 +103,14 @@ class WanIpSensor(SensorEntity):
|
||||
model=aiodns.__version__,
|
||||
name=name,
|
||||
)
|
||||
self.resolver: aiodns.DNSResolver
|
||||
self.create_dns_resolver()
|
||||
|
||||
def create_dns_resolver(self) -> None:
|
||||
"""Create the DNS resolver."""
|
||||
self.resolver = aiodns.DNSResolver(tcp_port=self.port, udp_port=self.port)
|
||||
self.resolver.nameservers = [self._resolver]
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the current DNS IP address for hostname."""
|
||||
if self.resolver._closed: # noqa: SLF001
|
||||
self.create_dns_resolver()
|
||||
response = None
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
response = await self.resolver.query(self.hostname, self.querytype)
|
||||
except TimeoutError:
|
||||
await self.resolver.close()
|
||||
response = await self.resolver.query(self.hostname, self.querytype)
|
||||
except DNSError as err:
|
||||
_LOGGER.warning("Exception while resolving host: %s", err)
|
||||
response = None
|
||||
|
||||
if response:
|
||||
sorted_ips = sort_ips(
|
||||
|
||||
@@ -116,11 +116,7 @@ class EbusdData:
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.write(self._address, self._circuit, name, value)
if (
command_result is not None
and "done" not in command_result
and "empty" not in command_result
):
if command_result is not None and "done" not in command_result:
_LOGGER.warning("Write command failed: %s", name)
except RuntimeError as err:
_LOGGER.error(err)
@@ -176,7 +176,7 @@
"description": "Sets the participating sensors for a climate program.",
"fields": {
"preset_mode": {
"name": "Climate program",
"name": "Climate Name",
"description": "Name of the climate program to set the sensors active on.\nDefaults to currently active program."
},
"device_ids": {
@@ -188,7 +188,7 @@
},
"exceptions": {
"invalid_preset": {
"message": "Invalid climate program, available options are: {options}"
"message": "Invalid climate name, available options are: {options}"
},
"invalid_sensor": {
"message": "Invalid sensor for thermostat, available options are: {options}"
@@ -6,5 +6,5 @@
"dependencies": ["webhook"],
"documentation": "https://www.home-assistant.io/integrations/ecowitt",
"iot_class": "local_push",
"requirements": ["aioecowitt==2025.9.2"]
"requirements": ["aioecowitt==2025.9.1"]
}
@@ -1,11 +0,0 @@
"""Analytics platform."""

from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
from homeassistant.core import HomeAssistant


async def async_modify_analytics(
hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
"""Modify the analytics."""
return AnalyticsModifications(remove=True)
@@ -10,7 +10,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Filter from a config entry."""
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -18,8 +17,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload Filter config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -246,6 +246,7 @@ class FilterConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
||||
@@ -46,9 +46,6 @@ async def async_get_config_entry_diagnostics(
|
||||
}
|
||||
for _, device in avm_wrapper.devices.items()
|
||||
],
|
||||
"cpu_temperatures": await hass.async_add_executor_job(
|
||||
avm_wrapper.fritz_status.get_cpu_temperatures
|
||||
),
|
||||
"wan_link_properties": await avm_wrapper.async_get_wan_link_properties(),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -20,5 +20,5 @@
"documentation": "https://www.home-assistant.io/integrations/frontend",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251001.0"]
"requirements": ["home-assistant-frontend==20250926.0"]
}
@@ -1,8 +1,10 @@
|
||||
load_url:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
url:
|
||||
example: "https://home-assistant.io"
|
||||
required: true
|
||||
@@ -10,10 +12,12 @@ load_url:
|
||||
text:
|
||||
|
||||
set_config:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
key:
|
||||
example: "motionSensitivity"
|
||||
required: true
|
||||
@@ -26,12 +30,14 @@ set_config:
|
||||
text:
|
||||
|
||||
start_application:
|
||||
target:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
fields:
|
||||
application:
|
||||
example: "de.ozerov.fully"
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fully_kiosk
|
||||
|
||||
@@ -147,6 +147,10 @@
|
||||
"name": "Load URL",
|
||||
"description": "Loads a URL on Fully Kiosk Browser.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"name": "Device ID",
|
||||
"description": "The target device for this action."
|
||||
},
|
||||
"url": {
|
||||
"name": "[%key:common::config_flow::data::url%]",
|
||||
"description": "URL to load."
|
||||
@@ -157,6 +161,10 @@
|
||||
"name": "Set configuration",
|
||||
"description": "Sets a configuration parameter on Fully Kiosk Browser.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
|
||||
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
|
||||
},
|
||||
"key": {
|
||||
"name": "Key",
|
||||
"description": "Configuration parameter to set."
|
||||
@@ -174,6 +182,10 @@
|
||||
"application": {
|
||||
"name": "Application",
|
||||
"description": "Package name of the application to start."
|
||||
},
|
||||
"device_id": {
|
||||
"name": "[%key:component::fully_kiosk::services::load_url::fields::device_id::name%]",
|
||||
"description": "[%key:component::fully_kiosk::services::load_url::fields::device_id::description%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,6 +108,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_HUMIDIFIER: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
# We use async_handle_source_entity_changes to track changes to the humidifer,
|
||||
@@ -140,6 +141,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_SENSOR: data["entity_id"]},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_track_entity_registry_updated_event(
|
||||
@@ -148,7 +150,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, (Platform.HUMIDIFIER,))
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
return True
|
||||
|
||||
|
||||
@@ -186,11 +187,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
|
||||
@@ -96,6 +96,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
||||
@@ -35,6 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_HEATER: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
# We use async_handle_source_entity_changes to track changes to the heater, but
|
||||
@@ -67,6 +68,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_SENSOR: data["entity_id"]},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
async_track_entity_registry_updated_event(
|
||||
@@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
return True
|
||||
|
||||
|
||||
@@ -113,11 +114,6 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
@@ -104,6 +104,7 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
||||
@@ -620,13 +620,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
def create_generate_content_config(self) -> GenerateContentConfig:
|
||||
"""Create the GenerateContentConfig for the LLM."""
|
||||
options = self.subentry.data
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
thinking_config: ThinkingConfig | None = None
|
||||
if model.startswith("models/gemini-2.5") and not model.endswith(
|
||||
("tts", "image", "image-preview")
|
||||
):
|
||||
thinking_config = ThinkingConfig(include_thoughts=True)
|
||||
|
||||
return GenerateContentConfig(
|
||||
temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
|
||||
top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
|
||||
@@ -659,7 +652,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
),
|
||||
),
|
||||
],
|
||||
thinking_config=thinking_config,
|
||||
thinking_config=ThinkingConfig(include_thoughts=True),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
set_vacation:
|
||||
target:
|
||||
device:
|
||||
integration: google_mail
|
||||
entity:
|
||||
integration: google_mail
|
||||
fields:
|
||||
|
||||
@@ -141,15 +141,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
await hass.config_entries.async_forward_entry_setups(
|
||||
entry, (entry.options["group_type"],)
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Update listener, called when the config entry options are changed."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(
|
||||
|
||||
@@ -329,6 +329,7 @@ class GroupConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
@callback
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
|
||||
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/hassio",
"iot_class": "local_polling",
"quality_scale": "internal",
"requirements": ["aiohasupervisor==0.3.3"],
"requirements": ["aiohasupervisor==0.3.3b0"],
"single_config_entry": true
}
@@ -65,6 +65,7 @@ async def async_setup_entry(
|
||||
entry,
|
||||
options={**entry.options, CONF_ENTITY_ID: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
async def source_entity_removed() -> None:
|
||||
# The source entity has been removed, we remove the config entry because
|
||||
@@ -86,7 +87,6 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -130,8 +130,3 @@ async def async_unload_entry(
|
||||
) -> bool:
|
||||
"""Unload History stats config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -162,6 +162,7 @@ class HistoryStatsConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
||||
@@ -22,6 +22,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.19.0"],
"requirements": ["aiohomeconnect==0.20.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}
@@ -32,15 +32,12 @@ set_location:
|
||||
stop:
|
||||
toggle:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_on:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
turn_off:
|
||||
target:
|
||||
entity: {}
|
||||
|
||||
update_entity:
|
||||
fields:
|
||||
@@ -53,8 +50,6 @@ update_entity:
|
||||
reload_custom_templates:
|
||||
reload_config_entry:
|
||||
target:
|
||||
entity: {}
|
||||
device: {}
|
||||
fields:
|
||||
entry_id:
|
||||
advanced: true
|
||||
|
||||
@@ -10,7 +10,6 @@ from homeassistant.components.homeassistant_hardware import firmware_config_flow
|
||||
from homeassistant.components.homeassistant_hardware.util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
ResetTarget,
|
||||
)
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
@@ -67,7 +66,6 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
|
||||
"""Mixin for Home Assistant Connect ZBT-2 firmware methods."""
|
||||
|
||||
context: ConfigFlowContext
|
||||
BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]
|
||||
|
||||
async def async_step_install_zigbee_firmware(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
|
||||
@@ -27,12 +27,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -75,10 +69,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -133,21 +129,14 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"flow_title": "{model}",
|
||||
"step": {
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"pick_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]",
|
||||
@@ -169,10 +158,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -224,10 +215,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
||||
@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
@@ -157,7 +156,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Connect ZBT-2 firmware update entity."""

bootloader_reset_methods = [ResetTarget.RTS_DTR]
bootloader_reset_type = None

def __init__(
self,

@@ -39,7 +39,6 @@ from .util import (
FirmwareInfo,
OwningAddon,
OwningIntegration,
ResetTarget,
async_flash_silabs_firmware,
get_otbr_addon_manager,
guess_firmware_info,
@@ -62,13 +61,6 @@ class PickedFirmwareType(StrEnum):
ZIGBEE = "zigbee"


class ZigbeeFlowStrategy(StrEnum):
"""Zigbee setup strategies that can be picked."""

ADVANCED = "advanced"
RECOMMENDED = "recommended"


class ZigbeeIntegration(StrEnum):
"""Zigbee integrations that can be picked."""

@@ -80,10 +72,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"""Base flow to install firmware."""

ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override
BOOTLOADER_RESET_METHODS: list[ResetTarget] = [] # Default, subclasses may override

_picked_firmware_type: PickedFirmwareType
_zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED

def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Instantiate base flow."""
@@ -158,6 +147,34 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
description_placeholders=self._get_translation_placeholders(),
)

async def _probe_firmware_info(
self,
probe_methods: tuple[ApplicationType, ...] = (
# We probe in order of frequency: Zigbee, Thread, then multi-PAN
ApplicationType.GECKO_BOOTLOADER,
ApplicationType.EZSP,
ApplicationType.SPINEL,
ApplicationType.CPC,
),
) -> bool:
"""Probe the firmware currently on the device."""
assert self._device is not None

self._probed_firmware_info = await probe_silabs_firmware_info(
self._device,
probe_methods=probe_methods,
)

return (
self._probed_firmware_info is not None
and self._probed_firmware_info.firmware_type
in (
ApplicationType.EZSP,
ApplicationType.SPINEL,
ApplicationType.CPC,
)
)

async def _install_firmware_step(
self,
fw_update_url: str,
@@ -211,6 +228,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
expected_installed_firmware_type: ApplicationType,
) -> None:
"""Install firmware."""
if not await self._probe_firmware_info():
raise AbortFlow(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)

assert self._device is not None

# Keep track of the firmware we're working with, for error messages
@@ -219,8 +242,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
# Installing new firmware is only truly required if the wrong type is
# installed: upgrading to the latest release of the current firmware type
# isn't strictly necessary for functionality.
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)

firmware_install_required = self._probed_firmware_info is None or (
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
)
@@ -272,12 +293,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
# Otherwise, fail
raise AbortFlow(reason="firmware_download_failed") from err

self._probed_firmware_info = await async_flash_silabs_firmware(
await async_flash_silabs_firmware(
hass=self.hass,
device=self._device,
fw_data=fw_data,
expected_installed_firmware_type=expected_installed_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
bootloader_reset_type=None,
progress_callback=lambda offset, total: self.async_update_progress(
offset / total
),
@@ -285,6 +306,15 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):

async def _configure_and_start_otbr_addon(self) -> None:
"""Configure and start the OTBR addon."""

# Before we start the addon, confirm that the correct firmware is running
# and populate `self._probed_firmware_info` with the correct information
if not await self._probe_firmware_info(probe_methods=(ApplicationType.SPINEL,)):
raise AbortFlow(
"unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)

otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)

@@ -365,14 +395,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
) -> ConfigFlowResult:
"""Select recommended installation type."""
self._zigbee_integration = ZigbeeIntegration.ZHA
self._zigbee_flow_strategy = ZigbeeFlowStrategy.RECOMMENDED
return await self._async_continue_picked_firmware()

async def async_step_zigbee_intent_custom(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Select custom installation type."""
self._zigbee_flow_strategy = ZigbeeFlowStrategy.ADVANCED
return await self.async_step_zigbee_integration()

async def async_step_zigbee_integration(
@@ -406,12 +434,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
if self._picked_firmware_type == PickedFirmwareType.ZIGBEE:
return await self.async_step_install_zigbee_firmware()

return await self.async_step_install_thread_firmware()
return await self.async_step_prepare_thread_installation()

async def async_step_finish_thread_installation(
async def async_step_prepare_thread_installation(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Finish Thread installation by starting the OTBR addon."""
"""Prepare for Thread installation by stopping the OTBR addon if needed."""
if not is_hassio(self.hass):
return self.async_abort(
reason="not_hassio_thread",
@@ -421,12 +449,22 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)

if addon_info.state == AddonState.RUNNING:
# Stop the addon before continuing to flash firmware
await otbr_manager.async_stop_addon()

return await self.async_step_install_thread_firmware()

async def async_step_finish_thread_installation(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Finish Thread installation by starting the OTBR addon."""
otbr_manager = get_otbr_addon_manager(self.hass)
addon_info = await self._async_get_addon_info(otbr_manager)

if addon_info.state == AddonState.NOT_INSTALLED:
return await self.async_step_install_otbr_addon()

if addon_info.state == AddonState.RUNNING:
await otbr_manager.async_stop_addon()

return await self.async_step_start_otbr_addon()

async def async_step_pick_firmware_zigbee(
@@ -463,6 +501,12 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
assert self._device is not None
assert self._hardware_name is not None

if not await self._probe_firmware_info(probe_methods=(ApplicationType.EZSP,)):
return self.async_abort(
reason="unsupported_firmware",
description_placeholders=self._get_translation_placeholders(),
)

if self._zigbee_integration == ZigbeeIntegration.OTHER:
return self._async_flow_finished()

@@ -477,7 +521,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
"flow_control": "hardware",
},
"radio_type": "ezsp",
"flow_strategy": self._zigbee_flow_strategy,
},
)
return self._continue_zha_flow(result)

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"universal-silabs-flasher==0.0.35",
"universal-silabs-flasher==0.0.32",
"ha-silabs-firmware-client==0.2.0"
]
}

@@ -23,16 +23,12 @@
"description": "Your {model} is now a Zigbee coordinator and will be shown as discovered by the Zigbee Home Automation integration."
},
"install_otbr_addon": {
"title": "Configuring Thread"
},
"install_thread_firmware": {
"title": "Updating adapter"
},
"install_zigbee_firmware": {
"title": "Updating adapter"
"title": "Installing OpenThread Border Router add-on",
"description": "The OpenThread Border Router (OTBR) add-on is being installed."
},
"start_otbr_addon": {
"title": "Configuring Thread"
"title": "Starting OpenThread Border Router add-on",
"description": "The OpenThread Border Router (OTBR) add-on is now starting."
},
"otbr_failed": {
"title": "Failed to set up OpenThread Border Router",
@@ -76,9 +72,7 @@
"fw_install_failed": "{firmware_name} firmware failed to install, check Home Assistant logs for more information."
},
"progress": {
"install_firmware": "Installing {firmware_name} firmware.\n\nDo not make any changes to your hardware or software until this finishes.",
"install_otbr_addon": "Installing add-on",
"start_otbr_addon": "Starting add-on"
"install_firmware": "Please wait while {firmware_name} firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes."
}
}
},

@@ -22,12 +22,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import FirmwareUpdateCoordinator
from .helpers import async_register_firmware_info_callback
from .util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
async_flash_silabs_firmware,
)
from .util import ApplicationType, FirmwareInfo, async_flash_silabs_firmware

_LOGGER = logging.getLogger(__name__)

@@ -86,7 +81,7 @@ class BaseFirmwareUpdateEntity(

# Subclasses provide the mapping between firmware types and entity descriptions
entity_description: FirmwareUpdateEntityDescription
bootloader_reset_methods: list[ResetTarget] = []
bootloader_reset_type: str | None = None

_attr_supported_features = (
UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
@@ -273,7 +268,7 @@ class BaseFirmwareUpdateEntity(
device=self._current_device,
fw_data=fw_data,
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.bootloader_reset_methods,
bootloader_reset_type=self.bootloader_reset_type,
progress_callback=self._update_progress,
)
finally:

@@ -4,16 +4,13 @@ from __future__ import annotations

import asyncio
from collections import defaultdict
from collections.abc import AsyncIterator, Callable, Iterable, Sequence
from collections.abc import AsyncIterator, Callable, Iterable
from contextlib import AsyncExitStack, asynccontextmanager
from dataclasses import dataclass
from enum import StrEnum
import logging

from universal_silabs_flasher.const import (
ApplicationType as FlasherApplicationType,
ResetTarget as FlasherResetTarget,
)
from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType
from universal_silabs_flasher.firmware import parse_firmware_image
from universal_silabs_flasher.flasher import Flasher

@@ -45,9 +42,9 @@ class ApplicationType(StrEnum):
"""Application type running on a device."""

GECKO_BOOTLOADER = "bootloader"
CPC = "cpc"
EZSP = "ezsp"
SPINEL = "spinel"
CPC = "cpc"
ROUTER = "router"

@classmethod
@@ -62,18 +59,6 @@ class ApplicationType(StrEnum):
return FlasherApplicationType(self.value)


class ResetTarget(StrEnum):
"""Methods to reset a device into bootloader mode."""

RTS_DTR = "rts_dtr"
BAUDRATE = "baudrate"
YELLOW = "yellow"

def as_flasher_reset_target(self) -> FlasherResetTarget:
"""Convert the reset target enum into one compatible with USF."""
return FlasherResetTarget(self.value)


@singleton(OTBR_ADDON_MANAGER_DATA)
@callback
def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager:
@@ -357,7 +342,7 @@ async def async_flash_silabs_firmware(
device: str,
fw_data: bytes,
expected_installed_firmware_type: ApplicationType,
bootloader_reset_methods: Sequence[ResetTarget] = (),
bootloader_reset_type: str | None = None,
progress_callback: Callable[[int, int], None] | None = None,
) -> FirmwareInfo:
"""Flash firmware to the SiLabs device."""
@@ -374,9 +359,7 @@ async def async_flash_silabs_firmware(
ApplicationType.SPINEL.as_flasher_application_type(),
ApplicationType.CPC.as_flasher_application_type(),
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
bootloader_reset=bootloader_reset_type,
)

async with AsyncExitStack() as stack:

@@ -27,12 +27,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -75,10 +69,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -133,10 +129,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
@@ -163,16 +158,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -224,10 +215,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
|
||||
@@ -168,8 +168,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""SkyConnect firmware update entity."""

# The ZBT-1 does not have a hardware bootloader trigger
bootloader_reset_methods = []
bootloader_reset_type = None

def __init__(
self,

@@ -27,8 +27,6 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
probe_silabs_firmware_info,
)
from homeassistant.config_entries import (
SOURCE_HARDWARE,
@@ -84,8 +82,6 @@ else:
class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
"""Mixin for Home Assistant Yellow firmware methods."""

BOOTLOADER_RESET_METHODS = [ResetTarget.YELLOW]

async def async_step_install_zigbee_firmware(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -145,10 +141,8 @@ class HomeAssistantYellowConfigFlow(
self, data: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
assert self._device is not None

# We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this
self._probed_firmware_info = await probe_silabs_firmware_info(self._device)
await self._probe_firmware_info()

# Kick off ZHA hardware discovery automatically if Zigbee firmware is running
if (

@@ -35,12 +35,6 @@
|
||||
"install_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::install_addon::title%]"
|
||||
},
|
||||
"install_thread_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_thread_firmware::title%]"
|
||||
},
|
||||
"install_zigbee_firmware": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_firmware::title%]"
|
||||
},
|
||||
"notify_channel_change": {
|
||||
"title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::notify_channel_change::description%]"
|
||||
@@ -98,10 +92,12 @@
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]"
|
||||
},
|
||||
"install_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]"
|
||||
},
|
||||
"start_otbr_addon": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]"
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]",
|
||||
"description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]"
|
||||
},
|
||||
"otbr_failed": {
|
||||
"title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]",
|
||||
@@ -158,10 +154,9 @@
|
||||
},
|
||||
"progress": {
|
||||
"install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]",
|
||||
"install_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_otbr_addon%]",
|
||||
"start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::start_otbr_addon%]"
|
||||
"start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]",
|
||||
"install_firmware": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_firmware%]"
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
|
||||
@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
@@ -174,7 +173,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
"""Yellow firmware update entity."""

bootloader_reset_methods = [ResetTarget.YELLOW] # Triggers a GPIO reset
bootloader_reset_type = "yellow" # Triggers a GPIO reset

def __init__(
self,

@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.19"],
"requirements": ["aiohomekit==3.2.18"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}

@@ -145,11 +145,7 @@ class HueMotionSensor(HueBaseEntity, BinarySensorEntity):
if not self.resource.enabled:
# Force None (unknown) if the sensor is set to disabled in Hue
return None
if not (motion_feature := self.resource.motion):
return None
if motion_feature.motion_report is not None:
return motion_feature.motion_report.motion
return motion_feature.motion
return self.resource.motion.value


# pylint: disable-next=hass-enforce-class-module

@@ -8,16 +8,13 @@ from idasen_ha import Desk

from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.debounce import Debouncer
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator]

UPDATE_DEBOUNCE_TIME = 0.2


class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
"""Class to manage updates for the Idasen Desk."""
@@ -36,22 +33,9 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
hass, _LOGGER, config_entry=config_entry, name=config_entry.title
)
self.address = address
self.desk = Desk(self._async_handle_update)

self._expected_connected = False
self._height: int | None = None

@callback
def async_update_data() -> None:
self.async_set_updated_data(self._height)

self._debouncer = Debouncer(
hass=self.hass,
logger=_LOGGER,
cooldown=UPDATE_DEBOUNCE_TIME,
immediate=True,
function=async_update_data,
)
self.desk = Desk(self.async_set_updated_data)

async def async_connect(self) -> bool:
"""Connect to desk."""
@@ -76,9 +60,3 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
"""Ensure that the desk is connected if that is the expected state."""
if self._expected_connected:
await self.async_connect()

@callback
def _async_handle_update(self, height: int | None) -> None:
"""Handle an update from the desk."""
self._height = height
self._debouncer.async_schedule_call()

@@ -3,7 +3,9 @@
from __future__ import annotations

import asyncio
from email.message import Message
import logging
from typing import Any

from aioimaplib import IMAP4_SSL, AioImapException, Response
import voluptuous as vol
@@ -33,6 +35,7 @@ from .coordinator import (
ImapPollingDataUpdateCoordinator,
ImapPushDataUpdateCoordinator,
connect_to_server,
get_parts,
)
from .errors import InvalidAuth, InvalidFolder

@@ -40,6 +43,7 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]

CONF_ENTRY = "entry"
CONF_SEEN = "seen"
CONF_PART = "part"
CONF_UID = "uid"
CONF_TARGET_FOLDER = "target_folder"

@@ -64,6 +68,11 @@ SERVICE_MOVE_SCHEMA = _SERVICE_UID_SCHEMA.extend(
)
SERVICE_DELETE_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_TEXT_SCHEMA = _SERVICE_UID_SCHEMA
SERVICE_FETCH_PART_SCHEMA = _SERVICE_UID_SCHEMA.extend(
{
vol.Required(CONF_PART): cv.string,
}
)

type ImapConfigEntry = ConfigEntry[ImapDataUpdateCoordinator]

@@ -216,12 +225,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
return {
"text": message.text,
"sender": message.sender,
"subject": message.subject,
"parts": get_parts(message.email_message),
"uid": uid,
}

@@ -233,6 +244,73 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
supports_response=SupportsResponse.ONLY,
)

async def async_fetch_part(call: ServiceCall) -> ServiceResponse:
"""Process fetch email part service and return content."""

@callback
def get_message_part(message: Message, part_key: str) -> Message:
part: Message | Any = message
for index in part_key.split(","):
sub_parts = part.get_payload()
try:
assert isinstance(sub_parts, list)
part = sub_parts[int(index)]
except (AssertionError, ValueError, IndexError) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc

return part

entry_id: str = call.data[CONF_ENTRY]
uid: str = call.data[CONF_UID]
part_key: str = call.data[CONF_PART]
_LOGGER.debug(
"Fetch part %s for message %s. Entry: %s",
part_key,
uid,
entry_id,
)
client = await async_get_imap_client(hass, entry_id)
try:
response = await client.fetch(uid, "BODY.PEEK[]")
except (TimeoutError, AioImapException) as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="imap_server_fail",
translation_placeholders={"error": str(exc)},
) from exc
raise_on_error(response, "fetch_failed")
# Index 1 of of the response lines contains the bytearray with the message data
message = ImapMessage(response.lines[1])
await client.close()
part_data = get_message_part(message.email_message, part_key)
part_data_content = part_data.get_payload(decode=False)
try:
assert isinstance(part_data_content, str)
except AssertionError as exc:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_part_index",
) from exc
return {
"part_data": part_data_content,
"content_type": part_data.get_content_type(),
"content_transfer_encoding": part_data.get("Content-Transfer-Encoding"),
"filename": part_data.get_filename(),
"part": part_key,
"uid": uid,
}

hass.services.async_register(
DOMAIN,
"fetch_part",
async_fetch_part,
SERVICE_FETCH_PART_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

return True

@@ -21,7 +21,7 @@ from homeassistant.const import (
|
||||
CONF_VERIFY_SSL,
|
||||
CONTENT_TYPE_TEXT_PLAIN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryError,
|
||||
@@ -209,6 +209,28 @@ class ImapMessage:
|
||||
return str(self.email_message.get_payload())
|
||||
|
||||
|
||||
@callback
|
||||
def get_parts(message: Message, prefix: str | None = None) -> dict[str, Any]:
|
||||
"""Return information about the parts of a multipart message."""
|
||||
parts: dict[str, Any] = {}
|
||||
if not message.is_multipart():
|
||||
return {}
|
||||
for index, part in enumerate(message.get_payload(), 0):
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(part, Message)
|
||||
key = f"{prefix},{index}" if prefix else f"{index}"
|
||||
if part.is_multipart():
|
||||
parts |= get_parts(part, key)
|
||||
continue
|
||||
parts[key] = {"content_type": part.get_content_type()}
|
||||
if filename := part.get_filename():
|
||||
parts[key]["filename"] = filename
|
||||
if content_transfer_encoding := part.get("Content-Transfer-Encoding"):
|
||||
parts[key]["content_transfer_encoding"] = content_transfer_encoding
|
||||
|
||||
return parts
|
||||
|
||||
|
||||
class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"""Base class for imap client."""
|
||||
|
||||
@@ -275,6 +297,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"sender": message.sender,
|
||||
"subject": message.subject,
|
||||
"uid": last_message_uid,
|
||||
"parts": get_parts(message.email_message),
|
||||
}
|
||||
data.update({key: getattr(message, key) for key in self._event_data_keys})
|
||||
if self.custom_event_template is not None:
|
||||
|
||||
@@ -21,6 +21,9 @@
|
||||
},
|
||||
"fetch": {
|
||||
"service": "mdi:email-sync-outline"
|
||||
},
|
||||
"fetch_part": {
|
||||
"service": "mdi:email-sync-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,3 +56,22 @@ fetch:
|
||||
example: "12"
|
||||
selector:
|
||||
text:
|
||||
|
||||
fetch_part:
|
||||
fields:
|
||||
entry:
|
||||
required: true
|
||||
selector:
|
||||
config_entry:
|
||||
integration: "imap"
|
||||
uid:
|
||||
required: true
|
||||
example: "12"
|
||||
selector:
|
||||
text:
|
||||
|
||||
part:
|
||||
required: true
|
||||
example: "0,1"
|
||||
selector:
|
||||
text:
|
||||
|
||||
@@ -84,6 +84,9 @@
|
||||
"imap_server_fail": {
|
||||
"message": "The IMAP server failed to connect: {error}."
|
||||
},
|
||||
"invalid_part_index": {
|
||||
"message": "Invalid part index."
|
||||
},
|
||||
"seen_failed": {
|
||||
"message": "Marking message as seen failed with \"{error}\"."
|
||||
}
|
||||
@@ -148,6 +151,24 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"fetch_part": {
|
||||
"name": "Fetch message part",
|
||||
"description": "Fetches a message part or attachment from an email message.",
|
||||
"fields": {
|
||||
"entry": {
|
||||
"name": "[%key:component::imap::services::fetch::fields::entry::name%]",
|
||||
"description": "[%key:component::imap::services::fetch::fields::entry::description%]"
|
||||
},
|
||||
"uid": {
|
||||
"name": "[%key:component::imap::services::fetch::fields::uid::name%]",
|
||||
"description": "[%key:component::imap::services::fetch::fields::uid::description%]"
|
||||
},
|
||||
"part": {
|
||||
"name": "Part",
|
||||
"description": "The message part index."
|
||||
}
|
||||
}
|
||||
},
|
||||
"seen": {
|
||||
"name": "Mark message as seen",
|
||||
"description": "Marks an email as seen.",
|
||||
|
||||
@@ -35,7 +35,7 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, issue_registry as ir
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .config_flow import ( # Loading the config flow file will register the flow
|
||||
@@ -221,19 +221,6 @@ PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Konnected platform."""
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_firmware",
|
||||
breaks_in_ha_version="2026.4.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_firmware",
|
||||
translation_placeholders={
|
||||
"kb_page_url": "https://support.konnected.io/migrating-from-konnected-legacy-home-assistant-integration-to-esphome",
|
||||
},
|
||||
)
|
||||
if (cfg := config.get(DOMAIN)) is None:
|
||||
cfg = {}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"domain": "konnected",
|
||||
"name": "Konnected.io (Legacy)",
|
||||
"name": "Konnected.io",
|
||||
"codeowners": ["@heythisisnate"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["http"],
|
||||
|
||||
@@ -105,11 +105,5 @@
|
||||
"abort": {
|
||||
"not_konn_panel": "[%key:component::konnected::config::abort::not_konn_panel%]"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_firmware": {
|
||||
"title": "Konnected firmware is deprecated",
|
||||
"description": "Konnected's integration is deprecated and Konnected strongly recommends migrating to their ESPHome based firmware and integration by following the guide at {kb_page_url}. After this migration, make sure you don't have any Konnected YAML configuration left in your configuration.yaml file and remove this integration from Home Assistant."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
"""Virtual integration: Konnected ESPHome."""
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"domain": "konnected_esphome",
|
||||
"name": "Konnected",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "esphome"
|
||||
}
|
||||
@@ -28,7 +28,7 @@ rules:
|
||||
docs-configuration-parameters:
|
||||
status: done
|
||||
comment: No options to configure
|
||||
docs-installation-parameters: todo
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
|
||||
@@ -22,7 +22,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -30,8 +29,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Unload Local file config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
@@ -65,6 +65,7 @@ class LocalFileConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
|
||||
"""Return config entry title."""
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
set_hold_time:
|
||||
target:
|
||||
device:
|
||||
integration: lyric
|
||||
entity:
|
||||
integration: lyric
|
||||
domain: climate
|
||||
|
||||
@@ -9,18 +9,6 @@
|
||||
"url": "The remote MCP server URL for the SSE endpoint, for example http://example/sse"
|
||||
}
|
||||
},
|
||||
"credentials_choice": {
|
||||
"title": "Choose how to authenticate with the MCP server",
|
||||
"description": "You can either use existing credentials from another integration or set up new credentials.",
|
||||
"menu_options": {
|
||||
"new_credentials": "Set up new credentials",
|
||||
"pick_implementation": "Use existing credentials"
|
||||
},
|
||||
"menu_option_descriptions": {
|
||||
"new_credentials": "You will be guided through setting up a new OAuth Client ID and secret.",
|
||||
"pick_implementation": "You may use previously entered OAuth credentials."
|
||||
}
|
||||
},
|
||||
"pick_implementation": {
|
||||
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]",
|
||||
"data": {
|
||||
@@ -39,21 +27,14 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"missing_capabilities": "The MCP server does not support a required capability (Tools)",
|
||||
"missing_credentials": "[%key:common::config_flow::abort::oauth2_missing_credentials%]",
|
||||
"no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
|
||||
"reauth_account_mismatch": "The authenticated user does not match the MCP Server user that needed re-authentication.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]",
|
||||
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
|
||||
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
|
||||
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
|
||||
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,9 @@ from aiomealie import MealieAuthenticationError, MealieClient, MealieConnectionE
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
|
||||
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_PORT, CONF_VERIFY_SSL
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
|
||||
from .const import DOMAIN, LOGGER, MIN_REQUIRED_MEALIE_VERSION
|
||||
from .utils import create_version
|
||||
@@ -25,13 +26,21 @@ REAUTH_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
}
|
||||
)
|
||||
DISCOVERY_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_TOKEN): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Mealie config flow."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
host: str | None = None
|
||||
verify_ssl: bool = True
|
||||
_hassio_discovery: dict[str, Any] | None = None
|
||||
|
||||
async def check_connection(
|
||||
self, api_token: str
|
||||
@@ -143,3 +152,59 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
data_schema=USER_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_hassio(
|
||||
self, discovery_info: HassioServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Prepare configuration for a Mealie add-on.
|
||||
|
||||
This flow is triggered by the discovery component.
|
||||
"""
|
||||
await self._async_handle_discovery_without_unique_id()
|
||||
|
||||
self._hassio_discovery = discovery_info.config
|
||||
|
||||
return await self.async_step_hassio_confirm()
|
||||
|
||||
async def async_step_hassio_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm Supervisor discovery and prompt for API token."""
|
||||
if user_input is None:
|
||||
return await self._show_hassio_form()
|
||||
|
||||
assert self._hassio_discovery
|
||||
|
||||
self.host = (
|
||||
f"{self._hassio_discovery[CONF_HOST]}:{self._hassio_discovery[CONF_PORT]}"
|
||||
)
|
||||
self.verify_ssl = True
|
||||
|
||||
errors, user_id = await self.check_connection(
|
||||
user_input[CONF_API_TOKEN],
|
||||
)
|
||||
|
||||
if not errors:
|
||||
await self.async_set_unique_id(user_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title="Mealie",
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
|
||||
CONF_VERIFY_SSL: self.verify_ssl,
|
||||
},
|
||||
)
|
||||
return await self._show_hassio_form(errors)
|
||||
|
||||
async def _show_hassio_form(
|
||||
self, errors: dict[str, str] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Show the Hass.io confirmation form to the user."""
|
||||
assert self._hassio_discovery
|
||||
return self.async_show_form(
|
||||
step_id="hassio_confirm",
|
||||
data_schema=DISCOVERY_SCHEMA,
|
||||
description_placeholders={"addon": self._hassio_discovery["addon"]},
|
||||
errors=errors or {},
|
||||
)
|
||||
|
||||
@@ -39,8 +39,14 @@ rules:
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration will only discover a Mealie addon that is local, not on the network.
|
||||
discovery:
|
||||
status: done
|
||||
comment: |
|
||||
The integration will discover a Mealie addon posting a discovery message.
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: todo
|
||||
|
||||
@@ -39,6 +39,16 @@
|
||||
"api_token": "[%key:component::mealie::common::data_description_api_token%]",
|
||||
"verify_ssl": "[%key:component::mealie::common::data_description_verify_ssl%]"
|
||||
}
|
||||
},
|
||||
"hassio_confirm": {
|
||||
"title": "Mealie via Home Assistant add-on",
|
||||
"description": "Do you want to configure Home Assistant to connect to the Mealie instance provided by the add-on: {addon}?",
|
||||
"data": {
|
||||
"api_token": "[%key:common::config_flow::data::api_token%]"
|
||||
},
|
||||
"data_description": {
|
||||
"api_token": "[%key:component::mealie::common::data_description_api_token%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
@@ -50,6 +60,7 @@
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"wrong_account": "You have to use the same account that was used to configure the integration."
|
||||
|
||||
@@ -8,6 +8,6 @@
|
||||
"iot_class": "calculated",
|
||||
"loggers": ["yt_dlp"],
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["yt-dlp[default]==2025.09.26"],
|
||||
"requirements": ["yt-dlp[default]==2025.09.23"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.translation import async_get_cached_translations
|
||||
|
||||
from .const import MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX
|
||||
|
||||
@@ -63,15 +62,12 @@ class MediaSourceItem:
|
||||
async def async_browse(self) -> BrowseMediaSource:
|
||||
"""Browse this item."""
|
||||
if self.domain is None:
|
||||
title = async_get_cached_translations(
|
||||
self.hass, self.hass.config.language, "common", "media_source"
|
||||
).get("component.media_source.common.sources_default", "Media Sources")
|
||||
base = BrowseMediaSource(
|
||||
domain=None,
|
||||
identifier=None,
|
||||
media_class=MediaClass.APP,
|
||||
media_content_type=MediaType.APPS,
|
||||
title=title,
|
||||
title="Media Sources",
|
||||
can_play=False,
|
||||
can_expand=True,
|
||||
children_media_class=MediaClass.APP,
|
||||
|
||||
@@ -9,8 +9,5 @@
|
||||
"unknown_media_source": {
|
||||
"message": "Unknown media source: {domain}"
|
||||
}
|
||||
},
|
||||
"common": {
|
||||
"sources_default": "Media sources"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -208,7 +208,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):
|
||||
|
||||
def __process_raw_value(self, entry: float | str | bytes) -> str | None:
|
||||
"""Process value from sensor with NaN handling, scaling, offset, min/max etc."""
|
||||
if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
|
||||
if self._nan_value and entry in (self._nan_value, -self._nan_value):
|
||||
return None
|
||||
if isinstance(entry, bytes):
|
||||
return entry.decode()
|
||||
|
||||
@@ -39,6 +39,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, CONF_INDOOR_HUMIDITY: source_entity_id},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
entry.async_on_unload(
|
||||
# We use async_handle_source_entity_changes to track changes to the humidity
|
||||
@@ -79,6 +80,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry,
|
||||
options={**entry.options, temp_sensor: data["entity_id"]},
|
||||
)
|
||||
hass.config_entries.async_schedule_reload(entry.entry_id)
|
||||
|
||||
return async_sensor_updated
|
||||
|
||||
@@ -89,7 +91,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
|
||||
return True
|
||||
|
||||
@@ -99,11 +100,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Handle options update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
|
||||
|
||||
@@ -100,6 +100,7 @@ class MoldIndicatorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):
|
||||
|
||||
config_flow = CONFIG_FLOW
|
||||
options_flow = OPTIONS_FLOW
|
||||
options_flow_reloads = True
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
set_text_overlay:
|
||||
target:
|
||||
device:
|
||||
integration: motioneye
|
||||
entity:
|
||||
domain: camera
|
||||
integration: motioneye
|
||||
fields:
|
||||
left_text:
|
||||
@@ -48,9 +47,8 @@ set_text_overlay:
|
||||
|
||||
action:
|
||||
target:
|
||||
device:
|
||||
integration: motioneye
|
||||
entity:
|
||||
domain: camera
|
||||
integration: motioneye
|
||||
fields:
|
||||
action:
|
||||
@@ -88,7 +86,6 @@ action:
|
||||
|
||||
snapshot:
|
||||
target:
|
||||
device:
|
||||
integration: motioneye
|
||||
entity:
|
||||
domain: camera
|
||||
integration: motioneye
|
||||
|
||||
@@ -51,7 +51,10 @@ from homeassistant.components.sensor import (
DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
SensorDeviceClass,
SensorStateClass,
)
from homeassistant.components.sensor.helpers import (
create_sensor_device_class_select_selector,
create_sensor_state_class_select_selector,
)
from homeassistant.components.switch import SwitchDeviceClass
from homeassistant.config_entries import (

@@ -703,14 +706,6 @@ SCALE_SELECTOR = NumberSelector(
step=1,
)
)
SENSOR_DEVICE_CLASS_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[device_class.value for device_class in SensorDeviceClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key="device_class_sensor",
sort=True,
)
)
SENSOR_ENTITY_CATEGORY_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[EntityCategory.DIAGNOSTIC.value],

@@ -719,13 +714,6 @@ SENSOR_ENTITY_CATEGORY_SELECTOR = SelectSelector(
sort=True,
)
)
SENSOR_STATE_CLASS_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[device_class.value for device_class in SensorStateClass],
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_STATE_CLASS,
)
)
SUPPORTED_COLOR_MODES_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[platform.value for platform in VALID_COLOR_MODES],

@@ -1284,10 +1272,12 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
Platform.NOTIFY.value: {},
Platform.SENSOR.value: {
CONF_DEVICE_CLASS: PlatformField(
selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False
selector=create_sensor_device_class_select_selector(),
required=False,
),
CONF_STATE_CLASS: PlatformField(
selector=SENSOR_STATE_CLASS_SELECTOR, required=False
selector=create_sensor_state_class_select_selector(),
required=False,
),
CONF_UNIT_OF_MEASUREMENT: PlatformField(
selector=unit_of_measurement_selector,

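The hunks above drop MQTT's locally defined SENSOR_DEVICE_CLASS_SELECTOR and SENSOR_STATE_CLASS_SELECTOR constants in favour of shared create_sensor_device_class_select_selector() / create_sensor_state_class_select_selector() helpers imported from homeassistant.components.sensor.helpers. A standalone sketch of the underlying idea, building a sorted dropdown option list from a StrEnum in one place instead of repeating the comprehension per integration; the enum and function names here are illustrative stand-ins, not the real helpers:

```python
# Illustrative stand-in for the shared select-selector helpers: derive the
# sorted option values once from the enum that defines the device classes.
from enum import StrEnum


class SensorDeviceClass(StrEnum):  # trimmed stand-in for the real enum
    TEMPERATURE = "temperature"
    HUMIDITY = "humidity"
    POWER = "power"


def create_device_class_options(enum_cls: type[StrEnum]) -> list[str]:
    """Return the sorted option values a SelectSelector would be built from."""
    return sorted(member.value for member in enum_cls)


print(create_device_class_options(SensorDeviceClass))
# ['humidity', 'power', 'temperature']
```
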
@@ -1200,69 +1200,6 @@
"window": "[%key:component::cover::entity_component::window::name%]"
}
},
"device_class_sensor": {
"options": {
"absolute_humidity": "[%key:component::sensor::entity_component::absolute_humidity::name%]",
"apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]",
"area": "[%key:component::sensor::entity_component::area::name%]",
"aqi": "[%key:component::sensor::entity_component::aqi::name%]",
"atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]",
"battery": "[%key:component::sensor::entity_component::battery::name%]",
"blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]",
"carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]",
"carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
"conductivity": "[%key:component::sensor::entity_component::conductivity::name%]",
"current": "[%key:component::sensor::entity_component::current::name%]",
"data_rate": "[%key:component::sensor::entity_component::data_rate::name%]",
"data_size": "[%key:component::sensor::entity_component::data_size::name%]",
"date": "[%key:component::sensor::entity_component::date::name%]",
"distance": "[%key:component::sensor::entity_component::distance::name%]",
"duration": "[%key:component::sensor::entity_component::duration::name%]",
"energy": "[%key:component::sensor::entity_component::energy::name%]",
"energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]",
"energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]",
"enum": "Enumeration",
"frequency": "[%key:component::sensor::entity_component::frequency::name%]",
"gas": "[%key:component::sensor::entity_component::gas::name%]",
"humidity": "[%key:component::sensor::entity_component::humidity::name%]",
"illuminance": "[%key:component::sensor::entity_component::illuminance::name%]",
"irradiance": "[%key:component::sensor::entity_component::irradiance::name%]",
"moisture": "[%key:component::sensor::entity_component::moisture::name%]",
"monetary": "[%key:component::sensor::entity_component::monetary::name%]",
"nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
"nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]",
"nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]",
"ozone": "[%key:component::sensor::entity_component::ozone::name%]",
"ph": "[%key:component::sensor::entity_component::ph::name%]",
"pm1": "[%key:component::sensor::entity_component::pm1::name%]",
"pm4": "[%key:component::sensor::entity_component::pm4::name%]",
"pm10": "[%key:component::sensor::entity_component::pm10::name%]",
"pm25": "[%key:component::sensor::entity_component::pm25::name%]",
"power": "[%key:component::sensor::entity_component::power::name%]",
"power_factor": "[%key:component::sensor::entity_component::power_factor::name%]",
"precipitation": "[%key:component::sensor::entity_component::precipitation::name%]",
"precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]",
"pressure": "[%key:component::sensor::entity_component::pressure::name%]",
"reactive_energy": "[%key:component::sensor::entity_component::reactive_energy::name%]",
"reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]",
"signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]",
"sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]",
"speed": "[%key:component::sensor::entity_component::speed::name%]",
"sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]",
"temperature": "[%key:component::sensor::entity_component::temperature::name%]",
"timestamp": "[%key:component::sensor::entity_component::timestamp::name%]",
"volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]",
"volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds_parts::name%]",
"voltage": "[%key:component::sensor::entity_component::voltage::name%]",
"volume": "[%key:component::sensor::entity_component::volume::name%]",
"volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]",
"volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]",
"water": "[%key:component::sensor::entity_component::water::name%]",
"weight": "[%key:component::sensor::entity_component::weight::name%]",
"wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]",
"wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]"
}
},
"device_class_switch": {
"options": {
"outlet": "[%key:component::switch::entity_component::outlet::name%]",

@@ -1324,14 +1261,6 @@
"custom": "Custom"
}
},
"state_class": {
"options": {
"measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]",
"measurement_angle": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement_angle%]",
"total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]",
"total_increasing": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]"
}
},
"supported_color_modes": {
"options": {
"onoff": "[%key:component::light::entity_component::_::state_attributes::color_mode::state::onoff%]",

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"iot_class": "local_polling",
"requirements": ["nibe==2.18.0"]
"requirements": ["nibe==2.19.0"]
}

@@ -13,6 +13,6 @@ NMAP_TRACKED_DEVICES: Final = "nmap_tracked_devices"
# Interval in minutes to exclude devices from a scan while they are home
CONF_HOME_INTERVAL: Final = "home_interval"
CONF_OPTIONS: Final = "scan_options"
DEFAULT_OPTIONS: Final = "-F -T4 --min-rate 10 --host-timeout 5s"
DEFAULT_OPTIONS: Final = "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s"

TRACKER_SCAN_INTERVAL: Final = 120

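The two DEFAULT_OPTIONS lines above are the old and new defaults. In standard nmap usage, -F runs a fast port scan, while -n (no DNS resolution), -sn (ping scan, no port scan) and -PR (ARP ping) describe a lighter presence sweep. A tiny sketch of how such an option string splits into scanner arguments; the splitting itself is just an illustration, not the integration's code path:

```python
# Illustrative only: turn the option string into argv-style tokens the way
# a scanner wrapper would before invoking nmap.
import shlex

DEFAULT_OPTIONS = "-n -sn -PR -T4 --min-rate 10 --host-timeout 5s"
print(shlex.split(DEFAULT_OPTIONS))
# ['-n', '-sn', '-PR', '-T4', '--min-rate', '10', '--host-timeout', '5s']
```
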
@@ -34,7 +34,6 @@ async def async_setup_entry(

coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
await coordinator.fetch_data(dt_util.utcnow(), True)
await coordinator.update_listeners(dt_util.utcnow())
if not coordinator.last_update_success:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,

@@ -44,10 +44,9 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
name=DOMAIN,
)
self.client = NordPoolClient(session=async_get_clientsession(hass))
self.data_unsub: Callable[[], None] | None = None
self.listener_unsub: Callable[[], None] | None = None
self.unsub: Callable[[], None] | None = None

def get_next_data_interval(self, now: datetime) -> datetime:
def get_next_interval(self, now: datetime) -> datetime:
"""Compute next time an update should occur."""
next_hour = dt_util.utcnow() + timedelta(hours=1)
next_run = datetime(

@@ -57,45 +56,23 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
next_hour.hour,
tzinfo=dt_util.UTC,
)
LOGGER.debug("Next data update at %s", next_run)
return next_run

def get_next_15_interval(self, now: datetime) -> datetime:
"""Compute next time we need to notify listeners."""
next_run = dt_util.utcnow() + timedelta(minutes=15)
next_minute = next_run.minute // 15 * 15
next_run = next_run.replace(
minute=next_minute, second=0, microsecond=0, tzinfo=dt_util.UTC
)

LOGGER.debug("Next listener update at %s", next_run)
LOGGER.debug("Next update at %s", next_run)
return next_run

async def async_shutdown(self) -> None:
"""Cancel any scheduled call, and ignore new runs."""
await super().async_shutdown()
if self.data_unsub:
self.data_unsub()
self.data_unsub = None
if self.listener_unsub:
self.listener_unsub()
self.listener_unsub = None

async def update_listeners(self, now: datetime) -> None:
"""Update entity listeners."""
self.listener_unsub = async_track_point_in_utc_time(
self.hass,
self.update_listeners,
self.get_next_15_interval(dt_util.utcnow()),
)
self.async_update_listeners()
if self.unsub:
self.unsub()
self.unsub = None

async def fetch_data(self, now: datetime, initial: bool = False) -> None:
"""Fetch data from Nord Pool."""
self.data_unsub = async_track_point_in_utc_time(
self.hass, self.fetch_data, self.get_next_data_interval(dt_util.utcnow())
self.unsub = async_track_point_in_utc_time(
self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
)
if self.config_entry.pref_disable_polling and not initial:
self.async_update_listeners()
return
try:
data = await self.handle_data(initial)

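One side of the Nord Pool hunk above schedules listener updates on quarter-hour boundaries via get_next_15_interval: add 15 minutes to "now", then floor the minute to a multiple of 15, presumably so entities refresh when a new 15-minute delivery period starts. A standalone sketch of that rounding, using plain datetime instead of dt_util and async_track_point_in_utc_time:

```python
# Standalone sketch of the quarter-hour scheduling logic: adding 15 minutes
# and flooring to a 15-minute boundary always yields the next hh:00/15/30/45
# boundary strictly within the coming 15 minutes.
from datetime import datetime, timedelta, timezone


def next_quarter_hour(now: datetime) -> datetime:
    """Return the next quarter-hour boundary after `now`."""
    candidate = now + timedelta(minutes=15)
    floored_minute = candidate.minute // 15 * 15
    return candidate.replace(minute=floored_minute, second=0, microsecond=0)


now = datetime(2025, 10, 1, 12, 7, 42, tzinfo=timezone.utc)
print(next_quarter_hour(now))  # 2025-10-01 12:15:00+00:00
```
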
@@ -307,7 +307,7 @@
},
"markdown": {
"name": "Format as Markdown",
"description": "Enable Markdown formatting for the message body. See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
"description": "Enable Markdown formatting for the message body (Web app only). See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
},
"tags": {
"name": "Tags/Emojis",

@@ -124,7 +124,7 @@ class NumberDeviceClass(StrEnum):
CO = "carbon_monoxide"
"""Carbon Monoxide gas concentration.

Unit of measurement: `ppm` (parts per million)
Unit of measurement: `ppm` (parts per million), mg/m³
"""

CO2 = "carbon_dioxide"

@@ -475,7 +475,10 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
NumberDeviceClass.BATTERY: {PERCENTAGE},
NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CO: {
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
NumberDeviceClass.CURRENT: set(UnitOfElectricCurrent),

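The hunk above widens the allowed units for the carbon monoxide number device class from ppm only to ppm plus mg/m³. A minimal sketch of how a DEVICE_CLASS_UNITS-style mapping is typically consulted to validate a configured unit; the simplified string keys and the unit_is_valid helper are illustrative, not the component's actual API:

```python
# Minimal sketch of unit validation against a DEVICE_CLASS_UNITS-style map;
# the constants mirror the ones referenced in the hunk above.
CONCENTRATION_PARTS_PER_MILLION = "ppm"
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER = "mg/m³"

DEVICE_CLASS_UNITS: dict[str, set[str]] = {
    "carbon_monoxide": {
        CONCENTRATION_PARTS_PER_MILLION,
        CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
    },
    "carbon_dioxide": {CONCENTRATION_PARTS_PER_MILLION},
}


def unit_is_valid(device_class: str, unit: str) -> bool:
    """Return True if the unit is allowed for the given device class."""
    return unit in DEVICE_CLASS_UNITS.get(device_class, set())


print(unit_is_valid("carbon_monoxide", "mg/m³"))  # True after this change
print(unit_is_valid("carbon_dioxide", "mg/m³"))   # False
```
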
@@ -112,9 +112,6 @@
"pm1": {
"name": "[%key:component::sensor::entity_component::pm1::name%]"
},
"pm4": {
"name": "[%key:component::sensor::entity_component::pm4::name%]"
},
"pm10": {
"name": "[%key:component::sensor::entity_component::pm10::name%]"
},

@@ -35,7 +35,7 @@ from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry

_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 32 * 320 * 1024 # 10.4MB
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024 # 5.2MB
TIMEOUT = ClientTimeout(connect=10, total=43200) # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300

@@ -163,10 +163,7 @@ class OneDriveBackupAgent(BackupAgent):
)
try:
backup_file = await LargeFileUploadClient.upload(
self._token_function,
file,
upload_chunk_size=UPLOAD_CHUNK_SIZE,
session=async_get_clientsession(self._hass),
self._token_function, file, session=async_get_clientsession(self._hass)
)
except HashMismatchError as err:
raise BackupAgentError(

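The OneDrive hunks above halve UPLOAD_CHUNK_SIZE and drop the explicit upload_chunk_size argument from LargeFileUploadClient.upload, leaving the chunk size to the client library's default. Both constants are multiples of 320 KiB, which (as an assumption not stated in the diff) is the granularity Microsoft Graph resumable upload sessions expect. A quick arithmetic check of the two values and of the decimal-MB figures in the inline comments:

```python
# Quick arithmetic behind the two UPLOAD_CHUNK_SIZE constants above.
KIB = 1024

old_chunk = 32 * 320 * KIB   # 10,485,760 bytes
new_chunk = 16 * 320 * KIB   # 5,242,880 bytes

print(old_chunk / (1024 * 1024))  # 10.0 MiB
print(new_chunk / (1024 * 1024))  # 5.0 MiB
print(old_chunk / 1_000_000)      # 10.48576 MB, rounded to "10.4MB" in the comment
print(new_chunk / 1_000_000)      # 5.24288 MB, rounded to "5.2MB" in the comment
```
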
@@ -52,10 +52,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: OnkyoConfigEntry) -> boo

try:
info = await async_interview(host)
except TimeoutError as exc:
raise ConfigEntryNotReady(f"Timed out interviewing: {host}") from exc
except OSError as exc:
raise ConfigEntryNotReady(f"Unable to connect to: {host}") from exc
if info is None:
raise ConfigEntryNotReady(f"Unable to connect to: {host}")
raise ConfigEntryNotReady(f"Unexpected exception interviewing: {host}") from exc

manager = ReceiverManager(hass, entry, info)

@@ -109,24 +109,22 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.debug("Config flow manual: %s", host)
try:
info = await async_interview(host)
except TimeoutError:
_LOGGER.warning("Timed out interviewing: %s", host)
errors["base"] = "cannot_connect"
except OSError:
_LOGGER.exception("Unexpected exception")
_LOGGER.exception("Unexpected exception interviewing: %s", host)
errors["base"] = "unknown"
else:
if info is None:
errors["base"] = "cannot_connect"
self._receiver_info = info

await self.async_set_unique_id(info.identifier, raise_on_progress=False)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
else:
self._receiver_info = info
self._abort_if_unique_id_configured()

await self.async_set_unique_id(
info.identifier, raise_on_progress=False
)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
else:
self._abort_if_unique_id_configured()

return await self.async_step_configure_receiver()
return await self.async_step_configure_receiver()

suggested_values = user_input
if suggested_values is None and self.source == SOURCE_RECONFIGURE:

@@ -214,13 +212,12 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN):

try:
info = await async_interview(host)
except OSError:
_LOGGER.exception("Unexpected exception interviewing host %s", host)
return self.async_abort(reason="unknown")

if info is None:
_LOGGER.debug("SSDP eiscp is None: %s", host)
except TimeoutError:
_LOGGER.warning("Timed out interviewing: %s", host)
return self.async_abort(reason="cannot_connect")
except OSError:
_LOGGER.exception("Unexpected exception interviewing: %s", host)
return self.async_abort(reason="unknown")

await self.async_set_unique_id(info.identifier)
self._abort_if_unique_id_configured(updates={CONF_HOST: info.host})

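The Onkyo config-flow hunks above converge on a single error mapping when interviewing a receiver: a TimeoutError reports "cannot_connect", any other OSError reports "unknown", and a successful interview proceeds. A standalone sketch of that mapping with a stand-in async_interview that simply times out; the helper name and behaviour here are illustrative only:

```python
# Standalone sketch of the timeout/OSError mapping used by the reworked flow.
# async_interview is a stand-in that always raises TimeoutError.
import asyncio


async def async_interview(host: str) -> str:
    """Stand-in for the real receiver interview; always times out."""
    raise TimeoutError


async def interview_error_key(host: str) -> str | None:
    """Return the flow's error key, or None when the interview succeeds."""
    try:
        await async_interview(host)
    except TimeoutError:
        return "cannot_connect"
    except OSError:
        return "unknown"
    return None


print(asyncio.run(interview_error_key("192.0.2.10")))  # cannot_connect
```
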
Some files were not shown because too many files have changed in this diff.