Mirror of https://github.com/home-assistant/core.git
Synced 2025-10-24 19:19:45 +00:00

Compare commits: mqtt-entit ... scop-huawe
171 Commits
Commits in this comparison (SHA1):

8858c4a72a e0d404456b 439fc18860 774ab06206 f484db8f0e 4af3c4f720
a020a32d8a 1ac2ae3443 2fce7db132 6e49911e1c 4215a16285 65ff4fe10e
5b7675e389 3019744035 21ab630380 564ff12db0 6c919e698f 5d644815fa
8dfa0f2f65 f9484acbfa d0c0247086 b116619af1 a3d760156f 6e194ad6ef
1e2a21b69f e90fe96b4e 4774ed508a 8f4a4d4c47 a83bbe2332 e5b93d3275
1c024f58af fa86148df0 7c6bbb97ea a5af501da4 f23cfb5594 67a12dc007
5783b3a576 7bc43039bd 23e2316c36 9e9c8f5724 11772dbc46 c12df5d776
b57ca143e6 b3e16bd4fa 18d5035877 d6db50fcc7 84d9fa3bd7 b08eb3a201
c74c317922 9edc6249ca 4fbcb79889 68fd5bc67e 882d047bb5 5c070c8f03
854882d612 b078c0ee7e 080b16a33d 6a1cf9827c 23e7b14eae 2a5cf83f50
5dcb68cdf6 fedeca107a 4fef19c7bc 8c953b0c4e 949544874f 237407010a
64e48816c7 6b76b3e729 4912280193 d4e72ad2cf 711526fc6c 4be428fce7
ea226806a0 bc77daf2ce acead56bd5 fd08c55b79 0c342c4750 da6986e58c
2f5fbc1f0e e79c76cd35 6edafd8965 204ff5d45f 591eb94515 0f3de627c5
b2699d8a03 769a770cf1 2d96e8ac4d 354cacdcae d999dd05d1 81572c6a84
8165ac196f 41c95247ec 2eb3360e8c fcd07902b0 71f94cad97 05277aa708
9f74471d22 1c8487a7e7 3c8612b6fd f28892c526 24b7cf261c ef69e6d54b
ca31a279fa e50c4c4787 3ecddda8dd af77f835a5 6de2016aa3 f1e72c1616
7af3eb638b 363e5f088c 5b1e3ef574 d607323731 31f595a3f8 9a27805349
477cdbb711 62b39fdd10 f806cc8b4b b6108001e4 56f33a8a5f 1e91ad6e23
9032de4b26 553fcb5156 378295e1cc ff95c6235f d398a13899 10b300e573
e95c0ef3a8 3b09adb360 d2380608e1 37188a0832 3134fd75e8 861f4a0578
a82c512472 10392d9719 b7acc66153 6249cabcba 84f2fd106d 45cc68d3e4
7fd75c7742 9522b11042 c874c4ac73 907ef8fa15 bc93153c40 6964829699
62e59608b0 9507b3f3aa 1d187abe10 0464cb8929 f410d94f80 dee3c11203
06e4b0a798 2fd55a49cb 80d7224dcf 9d03b1b9b4 cecdf553f3 54e6fbc042
9c098d3471 394575e4f7 effc33d0d2 7af4c337c6 4f222d7adf 00f16812e4
0efaf7efe8 55643f0632 36f4723f6e 03bc698936 0c1dc73422 c31537081b
d13067abb3 64da32b5f9 3990fc6ab2
@@ -41,6 +41,7 @@
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"python.analysis.typeCheckingMode": "basic",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

1 .github/copilot-instructions.md vendored

@@ -74,6 +74,7 @@ rules:
- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
- **Testing**: pytest with plain functions and fixtures
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)
22 .github/workflows/ci.yaml vendored

@@ -37,7 +37,7 @@ on:
type: boolean

env:
CACHE_VERSION: 9
CACHE_VERSION: 1
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.11"

@@ -428,7 +428,7 @@ jobs:
timeout-minutes: 60
strategy:
matrix:
python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
python-version: &matrix-python ${{ fromJson(needs.info.outputs.python_versions) }}
steps:
- *checkout
- &setup-python-matrix

@@ -514,9 +514,7 @@ jobs:
if: steps.cache-apt-check.outputs.cache-hit != 'true'
uses: &actions-cache-save actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
${{ env.APT_CACHE_DIR }}
${{ env.APT_LIST_CACHE_DIR }}
path: *path-apt-cache
key: *key-apt-cache
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'

@@ -641,7 +639,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
python-version: *matrix-python
steps:
- *checkout
- *setup-python-matrix

@@ -838,8 +836,8 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
group: ${{ fromJson(needs.info.outputs.test_groups) }}
python-version: *matrix-python
group: &matrix-group ${{ fromJson(needs.info.outputs.test_groups) }}
steps:
- *cache-restore-apt
- name: Install additional OS dependencies

@@ -964,7 +962,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
python-version: *matrix-python
mariadb-group: ${{ fromJson(needs.info.outputs.mariadb_groups) }}
steps:
- *cache-restore-apt

@@ -1081,7 +1079,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
python-version: *matrix-python
postgresql-group: ${{ fromJson(needs.info.outputs.postgresql_groups) }}
steps:
- *cache-restore-apt

@@ -1218,8 +1216,8 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
group: ${{ fromJson(needs.info.outputs.test_groups) }}
python-version: *matrix-python
group: *matrix-group
steps:
- *cache-restore-apt
- name: Install additional OS dependencies

4 .github/workflows/codeql.yml vendored

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
with:
category: "/language:python"
78 .github/workflows/wheels.yml vendored

@@ -31,7 +31,8 @@ jobs:
outputs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
- &checkout
name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}

@@ -91,7 +92,7 @@ jobs:
) > build_constraints.txt

- name: Upload env_file
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: &actions-upload-artifact actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: env_file
path: ./.env_file

@@ -99,14 +100,14 @@ jobs:
overwrite: true

- name: Upload build_constraints
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: *actions-upload-artifact
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true

- name: Upload requirements_diff
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: *actions-upload-artifact
with:
name: requirements_diff
path: ./requirements_diff.txt

@@ -118,7 +119,7 @@ jobs:
python -m script.gen_requirements_all ci

- name: Upload requirements_all_wheels
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: *actions-upload-artifact
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt

@@ -127,28 +128,41 @@ jobs:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
abi: ["cp313"]
matrix: &matrix-build
abi: ["cp313", "cp314"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
exclude:
- abi: cp314
arch: armv7
- abi: cp314
arch: armhf
- abi: cp314
arch: i386
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- *checkout

- name: Download env_file
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
- &download-env-file
name: Download env_file
uses: &actions-download-artifact actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
- &download-build-constraints
name: Download build_constraints
uses: *actions-download-artifact
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
- &download-requirements-diff
name: Download requirements_diff
uses: *actions-download-artifact
with:
name: requirements_diff

@@ -160,7 +174,7 @@ jobs:

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -177,33 +191,19 @@ jobs:
name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }}
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
abi: ["cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
matrix: *matrix-build
steps:
- name: Checkout the repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- *checkout

- name: Download env_file
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: requirements_diff
- *download-env-file
- *download-build-constraints
- *download-requirements-diff

- name: Download requirements_all_wheels
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: *actions-download-artifact
with:
name: requirements_all_wheels

@@ -221,7 +221,7 @@ jobs:

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: home-assistant/wheels@2025.09.1
uses: *home-assistant-wheels
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -182,7 +182,6 @@ homeassistant.components.efergy.*
homeassistant.components.eheimdigital.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elevenlabs.*
homeassistant.components.elgato.*
homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*

@@ -279,6 +278,7 @@ homeassistant.components.imap.*
homeassistant.components.imgw_pib.*
homeassistant.components.immich.*
homeassistant.components.incomfort.*
homeassistant.components.inels.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*

@@ -478,6 +478,7 @@ homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleep_as_android.*
homeassistant.components.sleepiq.*
homeassistant.components.sma.*
homeassistant.components.smhi.*
homeassistant.components.smlight.*
homeassistant.components.smtp.*

2 .vscode/settings.default.json vendored

@@ -7,6 +7,8 @@
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
// Pyright is too pedantic for Home Assistant
"python.analysis.typeCheckingMode": "basic",
"json.schemas": [
{
"fileMatch": [

2 CODEOWNERS generated

@@ -741,6 +741,8 @@ build.json @home-assistant/supervisor
/tests/components/improv_ble/ @emontnemery
/homeassistant/components/incomfort/ @jbouwh
/tests/components/incomfort/ @jbouwh
/homeassistant/components/inels/ @epdevlab
/tests/components/inels/ @epdevlab
/homeassistant/components/influxdb/ @mdegat01
/tests/components/influxdb/ @mdegat01
/homeassistant/components/inkbird/ @bdraco
4 Dockerfile generated

@@ -25,13 +25,13 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.11/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version

# Install uv
RUN pip3 install uv==0.8.9
RUN pip3 install uv==0.9.5

WORKDIR /usr/src

@@ -5,9 +5,6 @@ build_from:
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*

@@ -34,6 +34,9 @@ INPUT_FIELD_CODE = "code"
DUMMY_SECRET = "FPPTH34D4E3MI2HG"

GOOGLE_AUTHENTICATOR_URL = "https://support.google.com/accounts/answer/1066447"
AUTHY_URL = "https://authy.com/"

def _generate_qr_code(data: str) -> str:
"""Generate a base64 PNG string represent QR Code image of data."""

@@ -229,6 +232,8 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):
"code": self._ota_secret,
"url": self._url,
"qr_code": self._image,
"google_authenticator_url": GOOGLE_AUTHENTICATOR_URL,
"authy_url": AUTHY_URL,
},
errors=errors,
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/adax",
"iot_class": "local_polling",
"loggers": ["adax", "adax_local"],
"requirements": ["adax==0.4.0", "Adax-local==0.1.5"]
"requirements": ["adax==0.4.0", "Adax-local==0.2.0"]
}

@@ -53,9 +53,6 @@ __all__ = [
"GenImageTaskResult",
"async_generate_data",
"async_generate_image",
"async_setup",
"async_setup_entry",
"async_unload_entry",
]

_LOGGER = logging.getLogger(__name__)

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any, Final, final
from typing import Final, final

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONCENTRATION_MICROGRAMS_PER_CUBIC_METER

@@ -133,9 +133,9 @@ class AirQualityEntity(Entity):
@final
@property
def state_attributes(self) -> dict[str, Any]:
def state_attributes(self) -> dict[str, str | int | float]:
"""Return the state attributes."""
data: dict[str, Any] = self.generate_entity_state_attributes()
data: dict[str, str | int | float] = {}

for prop, attr in PROP_TO_ATTR.items():
if (value := getattr(self, prop)) is not None:

@@ -26,6 +26,10 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)

# Documentation URL for API key generation
_API_KEY_URL = "https://docs.airnowapi.org/account/request/"

async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect.

@@ -114,6 +118,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
),
}
),
description_placeholders={"api_key_url": _API_KEY_URL},
errors=errors,
)
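The AirNow change above (and the similar Compit and Android TV Remote changes further down) moves hard-coded URLs out of strings.json and into description_placeholders supplied by the config flow at runtime. A minimal sketch of that pattern, assuming a typical ConfigFlow step; the domain, constant, and field names here are illustrative, not taken from the diff:

```python
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

# Illustrative URL constant; the real flows above define their own.
_DOCS_URL = "https://example.invalid/api-key"


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow whose form description references a {docs_url} placeholder."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the user step."""
        if user_input is not None:
            return self.async_create_entry(title="Example", data=user_input)
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required("api_key"): str}),
            # The matching strings.json description contains "{docs_url}".
            description_placeholders={"docs_url": _DOCS_URL},
        )
```

The benefit is that strings.json stays free of URLs that would otherwise have to be kept in sync by translators.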
@@ -2,7 +2,7 @@
"config": {
"step": {
"user": {
"description": "To generate API key go to https://docs.airnowapi.org/account/request/",
"description": "To generate API key go to {api_key_url}",
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]",
"latitude": "[%key:common::config_flow::data::latitude%]",

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["airos==0.5.6"]
"requirements": ["airos==0.6.0"]
}

@@ -301,12 +301,11 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
@property
def state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
data: dict[str, Any] = self.generate_entity_state_attributes()

data[ATTR_CODE_FORMAT] = self.code_format
data[ATTR_CHANGED_BY] = self.changed_by
data[ATTR_CODE_ARM_REQUIRED] = self.code_arm_required
return data
return {
ATTR_CODE_FORMAT: self.code_format,
ATTR_CHANGED_BY: self.changed_by,
ATTR_CODE_ARM_REQUIRED: self.code_arm_required,
}

async def async_internal_added_to_hass(self) -> None:
"""Call when the alarm control panel entity is added to hass."""

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.4.4"]
"requirements": ["aioamazondevices==6.4.6"]
}

@@ -41,6 +41,11 @@ APPS_NEW_ID = "add_new"
CONF_APP_DELETE = "app_delete"
CONF_APP_ID = "app_id"

_EXAMPLE_APP_ID = "com.plexapp.android"
_EXAMPLE_APP_PLAY_STORE_URL = (
f"https://play.google.com/store/apps/details?id={_EXAMPLE_APP_ID}"
)

STEP_PAIR_DATA_SCHEMA = vol.Schema(
{
vol.Required("pin"): str,

@@ -355,5 +360,7 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
data_schema=data_schema,
description_placeholders={
"app_id": f"`{app_id}`" if app_id != APPS_NEW_ID else "",
"example_app_id": _EXAMPLE_APP_ID,
"example_app_play_store_url": _EXAMPLE_APP_PLAY_STORE_URL,
},
)

@@ -75,7 +75,7 @@
},
"data_description": {
"app_name": "Name of the application as you would like it to be displayed in Home Assistant.",
"app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android",
"app_id": "E.g. {example_app_id} for {example_app_play_store_url}",
"app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename",
"app_delete": "Check this box to delete the application from the list."
}

@@ -65,7 +65,6 @@ __all__ = (
"async_create_default_pipeline",
"async_get_pipelines",
"async_pipeline_from_audio_stream",
"async_setup",
"async_update_pipeline",
)

@@ -19,7 +19,14 @@ import wave
import hass_nabucasa
import voluptuous as vol

from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
from homeassistant.components import (
conversation,
media_player,
stt,
tts,
wake_word,
websocket_api,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

@@ -130,7 +137,10 @@ SAVE_DELAY = 10
@callback
def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
"""Filter out intents that are not local fallback."""
return result.intent.name in (intent.INTENT_GET_STATE)
return result.intent.name in (
intent.INTENT_GET_STATE,
media_player.INTENT_MEDIA_SEARCH_AND_PLAY,
)

@callback
@@ -125,7 +125,9 @@ class AsusWrtBridge(ABC):
@staticmethod
def get_bridge(
hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None
hass: HomeAssistant,
conf: dict[str, str | int],
options: dict[str, str | bool | int] | None = None,
) -> AsusWrtBridge:
"""Get Bridge instance."""
if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP):

@@ -175,12 +175,12 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
)

async def _async_check_connection(
self, user_input: dict[str, Any]
self, user_input: dict[str, str | int]
) -> tuple[str, str | None]:
"""Attempt to connect the AsusWrt router."""

api: AsusWrtBridge
host: str = user_input[CONF_HOST]
host = user_input[CONF_HOST]
protocol = user_input[CONF_PROTOCOL]
error: str | None = None

@@ -176,7 +176,7 @@ class AsusWrtRouter:
self._on_close: list[Callable] = []

self._options: dict[str, Any] = {
self._options: dict[str, str | bool | int] = {
CONF_DNSMASQ: DEFAULT_DNSMASQ,
CONF_INTERFACE: DEFAULT_INTERFACE,
CONF_REQUIRE_IP: True,

@@ -299,12 +299,10 @@ class AsusWrtRouter:
_LOGGER.warning("Reconnected to ASUS router %s", self.host)

self._connected_devices = len(wrt_devices)
consider_home: int = self._options.get(
CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds()
)
track_unknown: bool = self._options.get(
CONF_TRACK_UNKNOWN, DEFAULT_TRACK_UNKNOWN
consider_home = int(
self._options.get(CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds())
)
track_unknown = self._options.get(CONF_TRACK_UNKNOWN, DEFAULT_TRACK_UNKNOWN)

for device_mac, device in self._devices.items():
dev_info = wrt_devices.pop(device_mac, None)

@@ -5,7 +5,7 @@
"step": {
"init": {
"title": "Set up two-factor authentication using TOTP",
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator](https://support.google.com/accounts/answer/1066447) or [Authy](https://authy.com/).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**."
"description": "To activate two-factor authentication using time-based one-time passwords, scan the QR code with your authentication app. If you don't have one, we recommend either [Google Authenticator]({google_authenticator_url}) or [Authy]({authy_url}).\n\n{qr_code}\n\nAfter scanning the code, enter the six-digit code from your app to verify the setup. If you have problems scanning the QR code, do a manual setup with code **`{code}`**."
}
},
"error": {

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"quality_scale": "platinum",
"requirements": ["bring-api==1.1.0"]
"requirements": ["bring-api==1.1.1"]
}

@@ -20,5 +20,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/bthome",
"iot_class": "local_push",
"requirements": ["bthome-ble==3.14.2"]
"requirements": ["bthome-ble==3.15.0"]
}

@@ -525,18 +525,17 @@ class CalendarEntity(Entity):
@property
def state_attributes(self) -> dict[str, Any] | None:
"""Return the entity state attributes."""
data: dict[str, Any] = self.generate_entity_state_attributes()

if (event := self.event) is None:
return data or None
return None

data["message"] = event.summary
data["all_day"] = event.all_day
data["start_time"] = event.start_datetime_local.strftime(DATE_STR_FORMAT)
data["end_time"] = event.end_datetime_local.strftime(DATE_STR_FORMAT)
data["location"] = event.location if event.location else ""
data["description"] = event.description if event.description else ""
return data
return {
"message": event.summary,
"all_day": event.all_day,
"start_time": event.start_datetime_local.strftime(DATE_STR_FORMAT),
"end_time": event.end_datetime_local.strftime(DATE_STR_FORMAT),
"location": event.location if event.location else "",
"description": event.description if event.description else "",
}

@final
@property

@@ -664,9 +664,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def state_attributes(self) -> dict[str, str | None]:
"""Return the camera state attributes."""
attrs: dict[str, Any] = self.generate_entity_state_attributes()

attrs["access_token"] = self.access_tokens[-1]
attrs = {"access_token": self.access_tokens[-1]}

if model := self.model:
attrs["model_name"] = model

@@ -341,16 +341,16 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def state_attributes(self) -> dict[str, Any]:
"""Return the optional state attributes."""
data: dict[str, Any] = self.generate_entity_state_attributes()

supported_features = self.supported_features
temperature_unit = self.temperature_unit
precision = self.precision
hass = self.hass

data[ATTR_CURRENT_TEMPERATURE] = show_temp(
hass, self.current_temperature, temperature_unit, precision
)
data: dict[str, str | float | None] = {
ATTR_CURRENT_TEMPERATURE: show_temp(
hass, self.current_temperature, temperature_unit, precision
),
}

if ClimateEntityFeature.TARGET_TEMPERATURE in supported_features:
data[ATTR_TEMPERATURE] = show_temp(

@@ -78,7 +78,10 @@ class CompitConfigFlow(ConfigFlow, domain=DOMAIN):
)

return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors,
description_placeholders={"compit_url": "https://inext.compit.pl/"},
)

async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult:

@@ -2,7 +2,7 @@
"config": {
"step": {
"user": {
"description": "Please enter your https://inext.compit.pl/ credentials.",
"description": "Please enter your {compit_url} credentials.",
"title": "Connect to Compit iNext",
"data": {
"email": "[%key:common::config_flow::data::email%]",

@@ -87,7 +87,6 @@ __all__ = [
"async_get_chat_log",
"async_get_result_from_chat_log",
"async_set_agent",
"async_setup",
"async_unset_agent",
]

@@ -267,7 +267,7 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
data: dict[str, Any] = self.generate_entity_state_attributes()
data = {}

if (current := self.current_cover_position) is not None:
data[ATTR_CURRENT_POSITION] = current

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"quality_scale": "bronze",
"requirements": ["pycync==0.4.1"]
"requirements": ["pycync==0.4.2"]
}
@@ -3,12 +3,14 @@
from __future__ import annotations

import asyncio
from datetime import datetime
from typing import Any

from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.event import async_track_utc_time_change

OPEN_CLOSE_DELAY = 2  # Used to give a realistic open/close experience in frontend

@@ -23,6 +25,8 @@ async def async_setup_entry(
[
DemoValve("Front Garden", ValveState.OPEN),
DemoValve("Orchard", ValveState.CLOSED),
DemoValve("Back Garden", ValveState.CLOSED, position=70),
DemoValve("Trees", ValveState.CLOSED, position=30),
]
)

@@ -37,6 +41,7 @@ class DemoValve(ValveEntity):
name: str,
state: str,
moveable: bool = True,
position: int | None = None,
) -> None:
"""Initialize the valve."""
self._attr_name = name

@@ -46,11 +51,23 @@ class DemoValve(ValveEntity):
)
self._state = state
self._moveable = moveable
self._attr_reports_position = False
self._unsub_listener_valve: CALLBACK_TYPE | None = None
self._set_position: int = 0
self._position: int = 0
if position is None:
return

self._position = self._set_position = position
self._attr_reports_position = True
self._attr_supported_features |= (
ValveEntityFeature.SET_POSITION | ValveEntityFeature.STOP
)

@property
def is_open(self) -> bool:
"""Return true if valve is open."""
return self._state == ValveState.OPEN
def current_valve_position(self) -> int:
"""Return current position of valve."""
return self._position

@property
def is_opening(self) -> bool:

@@ -67,11 +84,6 @@ class DemoValve(ValveEntity):
"""Return true if valve is closed."""
return self._state == ValveState.CLOSED

@property
def reports_position(self) -> bool:
"""Return True if entity reports position, False otherwise."""
return False

async def async_open_valve(self, **kwargs: Any) -> None:
"""Open the valve."""
self._state = ValveState.OPENING

@@ -87,3 +99,45 @@ class DemoValve(ValveEntity):
await asyncio.sleep(OPEN_CLOSE_DELAY)
self._state = ValveState.CLOSED
self.async_write_ha_state()

async def async_stop_valve(self) -> None:
"""Stop the valve."""
self._state = ValveState.OPEN if self._position > 0 else ValveState.CLOSED
if self._unsub_listener_valve is not None:
self._unsub_listener_valve()
self._unsub_listener_valve = None
self.async_write_ha_state()

async def async_set_valve_position(self, position: int) -> None:
"""Move the valve to a specific position."""
if position == self._position:
return
if position > self._position:
self._state = ValveState.OPENING
else:
self._state = ValveState.CLOSING

self._set_position = round(position, -1)
self._listen_valve()
self.async_write_ha_state()

@callback
def _listen_valve(self) -> None:
"""Listen for changes in valve."""
if self._unsub_listener_valve is None:
self._unsub_listener_valve = async_track_utc_time_change(
self.hass, self._time_changed_valve
)

async def _time_changed_valve(self, now: datetime) -> None:
"""Track time changes."""
if self._state == ValveState.OPENING:
self._position += 10
elif self._state == ValveState.CLOSING:
self._position -= 10

if self._position in (100, 0, self._set_position):
await self.async_stop_valve()
return

self.async_write_ha_state()
@@ -2,12 +2,12 @@
from __future__ import annotations

from homeassistant.const import STATE_HOME
from homeassistant.const import ATTR_GPS_ACCURACY, STATE_HOME  # noqa: F401
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass

from .config_entry import (
from .config_entry import (  # noqa: F401
ScannerEntity,
ScannerEntityDescription,
TrackerEntity,

@@ -15,7 +15,7 @@ from .config_entry import (
async_setup_entry,
async_unload_entry,
)
from .const import (
from .const import (  # noqa: F401
ATTR_ATTRIBUTES,
ATTR_BATTERY,
ATTR_DEV_ID,

@@ -37,7 +37,7 @@ from .const import (
SCAN_INTERVAL,
SourceType,
)
from .legacy import (
from .legacy import (  # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
SERVICE_SEE,

@@ -61,44 +61,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the device tracker."""
async_setup_legacy_integration(hass, config)
return True


__all__ = (
"ATTR_ATTRIBUTES",
"ATTR_BATTERY",
"ATTR_DEV_ID",
"ATTR_GPS",
"ATTR_HOST_NAME",
"ATTR_IP",
"ATTR_LOCATION_NAME",
"ATTR_MAC",
"ATTR_SOURCE_TYPE",
"CONF_CONSIDER_HOME",
"CONF_NEW_DEVICE_DEFAULTS",
"CONF_SCAN_INTERVAL",
"CONF_TRACK_NEW",
"CONNECTED_DEVICE_REGISTERED",
"DEFAULT_CONSIDER_HOME",
"DEFAULT_TRACK_NEW",
"DOMAIN",
"ENTITY_ID_FORMAT",
"PLATFORM_SCHEMA",
"PLATFORM_SCHEMA_BASE",
"SCAN_INTERVAL",
"SERVICE_SEE",
"SERVICE_SEE_PAYLOAD_SCHEMA",
"SOURCE_TYPES",
"AsyncSeeCallback",
"DeviceScanner",
"ScannerEntity",
"ScannerEntityDescription",
"SeeCallback",
"SourceType",
"TrackerEntity",
"TrackerEntityDescription",
"async_setup",
"async_setup_entry",
"async_unload_entry",
"is_on",
"see",
)
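The device_tracker change above drops the large module-level `__all__` tuple and instead marks each re-exported import with `# noqa: F401`. A minimal illustrative sketch of that re-export style in a package `__init__.py`; the package and symbol names here are made up and not taken from the diff:

```python
# mypackage/__init__.py - expose a curated public API from submodules.
# Without an __all__ tuple, Ruff/Flake8 would report these imports as
# unused (F401), so each intentional re-export carries an explicit
# suppression comment instead of being repeated in __all__.
from .parsing import parse_config  # noqa: F401
from .serialization import dump_config, load_config  # noqa: F401
```

This keeps the public surface declared in one place (the import block itself) rather than maintained twice.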
@@ -3,7 +3,7 @@
from __future__ import annotations

import asyncio
from typing import Any, final
from typing import final

from propcache.api import cached_property

@@ -28,6 +28,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity_platform import EntityPlatform
from homeassistant.helpers.typing import StateType
from homeassistant.util.hass_dict import HassKey

from .const import (

@@ -188,11 +189,9 @@ class BaseTrackerEntity(Entity):
raise NotImplementedError

@property
def state_attributes(self) -> dict[str, Any]:
def state_attributes(self) -> dict[str, StateType]:
"""Return the device state attributes."""
attr: dict[str, Any] = self.generate_entity_state_attributes()

attr[ATTR_SOURCE_TYPE] = self.source_type
attr: dict[str, StateType] = {ATTR_SOURCE_TYPE: self.source_type}

if self.battery_level is not None:
attr[ATTR_BATTERY_LEVEL] = self.battery_level

@@ -279,9 +278,9 @@ class TrackerEntity(
@final
@property
def state_attributes(self) -> dict[str, Any]:
def state_attributes(self) -> dict[str, StateType]:
"""Return the device state attributes."""
attr: dict[str, Any] = {}
attr: dict[str, StateType] = {}
attr.update(super().state_attributes)

if self.latitude is not None and self.longitude is not None:

@@ -432,10 +431,9 @@ class ScannerEntity(
@final
@property
def state_attributes(self) -> dict[str, Any]:
def state_attributes(self) -> dict[str, StateType]:
"""Return the device state attributes."""
attr: dict[str, Any] = self.generate_entity_state_attributes()
attr.update(super().state_attributes)
attr = super().state_attributes

if ip_address := self.ip_address:
attr[ATTR_IP] = ip_address

@@ -48,7 +48,7 @@ from homeassistant.helpers.event import (
async_track_utc_time_change,
)
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, GPSType
from homeassistant.helpers.typing import ConfigType, GPSType, StateType
from homeassistant.setup import (
SetupPhases,
async_notify_setup_error,

@@ -842,11 +842,9 @@ class Device(RestoreEntity):
@final
@property
def state_attributes(self) -> dict[str, Any]:
def state_attributes(self) -> dict[str, StateType]:
"""Return the device state attributes."""
attributes: dict[str, Any] = self.generate_entity_state_attributes()

attributes[ATTR_SOURCE_TYPE] = self.source_type
attributes: dict[str, StateType] = {ATTR_SOURCE_TYPE: self.source_type}

if self.gps is not None:
attributes[ATTR_LATITUDE] = self.gps[0]

@@ -80,8 +80,7 @@ async def async_setup_entry(
)


# The pylint disable is needed because of https://github.com/pylint-dev/pylint/issues/9138
class DevoloScannerEntity(  # pylint: disable=hass-enforce-class-module
class DevoloScannerEntity(
CoordinatorEntity[DevoloDataUpdateCoordinator[dict[str, ConnectedStationInfo]]],
ScannerEntity,
):

@@ -4,6 +4,7 @@
"codeowners": ["@mib1185", "@edenhaus", "@Augar"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==15.1.0"]

@@ -21,6 +21,9 @@ DEFAULT_STT_MODEL = "scribe_v1"
DEFAULT_STYLE = 0
DEFAULT_USE_SPEAKER_BOOST = True

MAX_REQUEST_IDS = 3
MODELS_PREVIOUS_INFO_NOT_SUPPORTED = ("eleven_v3",)

STT_LANGUAGES = [
"af-ZA",  # Afrikaans
"am-ET",  # Amharic

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["elevenlabs"],
"requirements": ["elevenlabs==2.3.0"]
"requirements": ["elevenlabs==2.3.0", "sentence-stream==1.2.0"]
}

@@ -85,4 +85,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done
strict-typing: todo
@@ -2,17 +2,23 @@
from __future__ import annotations

from collections.abc import Mapping
import asyncio
from collections import deque
from collections.abc import AsyncGenerator, Mapping
import contextlib
import logging
from typing import Any

from elevenlabs import AsyncElevenLabs
from elevenlabs.core import ApiError
from elevenlabs.types import Model, Voice as ElevenLabsVoice, VoiceSettings
from sentence_stream import SentenceBoundaryDetector

from homeassistant.components.tts import (
ATTR_VOICE,
TextToSpeechEntity,
TTSAudioRequest,
TTSAudioResponse,
TtsAudioType,
Voice,
)

@@ -35,10 +41,12 @@ from .const import (
DEFAULT_STYLE,
DEFAULT_USE_SPEAKER_BOOST,
DOMAIN,
MAX_REQUEST_IDS,
MODELS_PREVIOUS_INFO_NOT_SUPPORTED,
)

_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
PARALLEL_UPDATES = 6

def to_voice_settings(options: Mapping[str, Any]) -> VoiceSettings:

@@ -122,7 +130,12 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
self._attr_supported_languages = [
lang.language_id for lang in self._model.languages or []
]
self._attr_default_language = self.supported_languages[0]
# Use the first supported language as the default if available
self._attr_default_language = (
self._attr_supported_languages[0]
if self._attr_supported_languages
else "en"
)

def async_get_supported_voices(self, language: str) -> list[Voice]:
"""Return a list of supported voices for a language."""

@@ -151,3 +164,151 @@ class ElevenLabsTTSEntity(TextToSpeechEntity):
)
raise HomeAssistantError(exc) from exc
return "mp3", bytes_combined

async def async_stream_tts_audio(
self, request: TTSAudioRequest
) -> TTSAudioResponse:
"""Generate speech from an incoming message."""
_LOGGER.debug(
"Getting TTS audio for language %s and options: %s",
request.language,
request.options,
)
return TTSAudioResponse("mp3", self._process_tts_stream(request))

async def _process_tts_stream(
self, request: TTSAudioRequest
) -> AsyncGenerator[bytes]:
"""Generate speech from an incoming message."""
text_stream = request.message_gen
boundary_detector = SentenceBoundaryDetector()
sentences: list[str] = []
sentences_ready = asyncio.Event()
sentences_complete = False

language_code: str | None = request.language
voice_id = request.options.get(ATTR_VOICE, self._default_voice_id)
model = request.options.get(ATTR_MODEL, self._model.model_id)

use_request_ids = model not in MODELS_PREVIOUS_INFO_NOT_SUPPORTED
previous_request_ids: deque[str] = deque(maxlen=MAX_REQUEST_IDS)

base_stream_params = {
"voice_id": voice_id,
"model_id": model,
"output_format": "mp3_44100_128",
"voice_settings": self._voice_settings,
}
if language_code:
base_stream_params["language_code"] = language_code

_LOGGER.debug("Starting TTS Stream with options: %s", base_stream_params)

async def _add_sentences() -> None:
nonlocal sentences_complete

try:
# Text chunks may not be on word or sentence boundaries
async for text_chunk in text_stream:
for sentence in boundary_detector.add_chunk(text_chunk):
if not sentence.strip():
continue

sentences.append(sentence)

if not sentences:
continue

sentences_ready.set()

# Final sentence
if text := boundary_detector.finish():
sentences.append(text)
finally:
sentences_complete = True
sentences_ready.set()

_add_sentences_task = self.hass.async_create_background_task(
_add_sentences(), name="elevenlabs_tts_add_sentences"
)

# Process new sentences as they're available, but synthesize the first
# one immediately. While that's playing, synthesize (up to) the next 3
# sentences. After that, synthesize all completed sentences as they're
# available.
sentence_schedule = [1, 3]
while True:
await sentences_ready.wait()

# Don't wait again if no more sentences are coming
if not sentences_complete:
sentences_ready.clear()

if not sentences:
if sentences_complete:
# Exit TTS loop
_LOGGER.debug("No more sentences to process")
break

# More sentences may be coming
continue

new_sentences = sentences[:]
sentences.clear()

while new_sentences:
if sentence_schedule:
max_sentences = sentence_schedule.pop(0)
sentences_to_process = new_sentences[:max_sentences]
new_sentences = new_sentences[len(sentences_to_process) :]
else:
# Process all available sentences together
sentences_to_process = new_sentences[:]
new_sentences.clear()

# Combine all new sentences completed to this point
text = " ".join(sentences_to_process).strip()

if not text:
continue

# Build kwargs common to both modes
kwargs = base_stream_params | {
"text": text,
}

# Provide previous_request_ids if supported.
if previous_request_ids:
# Send previous request ids.
kwargs["previous_request_ids"] = list(previous_request_ids)

# Synthesize audio while text chunks are still being accumulated
_LOGGER.debug("Synthesizing TTS for text: %s", text)
try:
async with self._client.text_to_speech.with_raw_response.stream(
**kwargs
) as stream:
async for chunk_bytes in stream.data:
yield chunk_bytes

if use_request_ids:
if (rid := stream.headers.get("request-id")) is not None:
previous_request_ids.append(rid)
else:
_LOGGER.debug(
"No request-id returned from server; clearing previous requests"
)
previous_request_ids.clear()
except ApiError as exc:
_LOGGER.warning(
"Error during processing of TTS request %s", exc, exc_info=True
)
_add_sentences_task.cancel()
with contextlib.suppress(asyncio.CancelledError):
await _add_sentences_task
raise HomeAssistantError(exc) from exc

# Capture and store server request-id for next calls (only when supported)
_LOGGER.debug("Completed TTS stream for text: %s", text)

_LOGGER.debug("Completed TTS stream")
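The new streaming TTS handler above batches arbitrary incoming text chunks into complete sentences before sending each batch to ElevenLabs. A minimal standalone sketch of just that accumulation step, using only the `SentenceBoundaryDetector` calls visible in the diff (`add_chunk` and `finish`); the chunk strings are invented for illustration:

```python
from sentence_stream import SentenceBoundaryDetector

# Text arrives in arbitrary pieces (for example from an LLM token stream);
# the detector re-slices it into complete sentences.
detector = SentenceBoundaryDetector()
chunks = [
    "Turning on the liv",
    "ing room lights. The thermostat ",
    "is set to 21 degrees.",
]

sentences: list[str] = []
for chunk in chunks:
    # add_chunk() yields any sentences completed by this chunk.
    sentences.extend(s for s in detector.add_chunk(chunk) if s.strip())
# Flush whatever is left once the stream ends.
if remainder := detector.finish():
    sentences.append(remainder)

print(sentences)
# Expected: two complete sentences, each of which the entity above would
# synthesize as its own request while later text is still arriving.
```

Sending whole sentences rather than raw chunks is what lets the integration start audio playback before the full message has been generated.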
@@ -9,6 +9,7 @@ from typing import Any, cast
from aioesphomeapi import (
ClimateAction,
ClimateFanMode,
ClimateFeature,
ClimateInfo,
ClimateMode,
ClimatePreset,

@@ -134,12 +135,16 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = "climate"
_feature_flags = ClimateFeature(0)

@callback
def _on_static_info_update(self, static_info: EntityInfo) -> None:
"""Set attrs from static info."""
super()._on_static_info_update(static_info)
static_info = self._static_info
self._feature_flags = ClimateFeature(
static_info.supported_feature_flags_compat(self._api_version)
)
self._attr_precision = self._get_precision()
self._attr_hvac_modes = [
_CLIMATE_MODES.from_esphome(mode) for mode in static_info.supported_modes

@@ -163,11 +168,18 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
self._attr_max_temp = static_info.visual_max_temperature
self._attr_min_humidity = round(static_info.visual_min_humidity)
self._attr_max_humidity = round(static_info.visual_max_humidity)
features = ClimateEntityFeature.TARGET_TEMPERATURE
if static_info.supports_two_point_target_temperature:
features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
if static_info.supports_target_humidity:
features = ClimateEntityFeature(0)
if self._feature_flags & ClimateFeature.SUPPORTS_TARGET_HUMIDITY:
features |= ClimateEntityFeature.TARGET_HUMIDITY
if self._feature_flags & ClimateFeature.REQUIRES_TWO_POINT_TARGET_TEMPERATURE:
features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
else:
features |= ClimateEntityFeature.TARGET_TEMPERATURE
if (
self._feature_flags
& ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
):
features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
if self.preset_modes:
features |= ClimateEntityFeature.PRESET_MODE
if self.fan_modes:

@@ -203,7 +215,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
def hvac_action(self) -> HVACAction | None:
"""Return current action."""
# HA has no support feature field for hvac_action
if not self._static_info.supports_action:
if not self._feature_flags & ClimateFeature.SUPPORTS_ACTION:
return None
return _CLIMATE_ACTIONS.from_esphome(self._state.action)

@@ -233,7 +245,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
@esphome_float_state_property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if not self._static_info.supports_current_temperature:
if not self._feature_flags & ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE:
return None
return self._state.current_temperature

@@ -242,7 +254,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
def current_humidity(self) -> int | None:
"""Return the current humidity."""
if (
not self._static_info.supports_current_humidity
(not self._feature_flags & ClimateFeature.SUPPORTS_CURRENT_HUMIDITY)
or (val := self._state.current_humidity) is None
or not isfinite(val)
):

@@ -254,7 +266,11 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
if (
not self._static_info.supports_two_point_target_temperature
not self._feature_flags
& (
ClimateFeature.REQUIRES_TWO_POINT_TARGET_TEMPERATURE
| ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
)
and self.hvac_mode != HVACMode.AUTO
):
return self._state.target_temperature

@@ -295,7 +311,10 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
cast(HVACMode, kwargs[ATTR_HVAC_MODE])
)
if ATTR_TEMPERATURE in kwargs:
if not self._static_info.supports_two_point_target_temperature:
if not self._feature_flags & (
ClimateFeature.REQUIRES_TWO_POINT_TARGET_TEMPERATURE
| ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
):
data["target_temperature"] = kwargs[ATTR_TEMPERATURE]
else:
hvac_mode = kwargs.get(ATTR_HVAC_MODE) or self.hvac_mode
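The ESPHome climate change above replaces the individual `supports_*` booleans with a single `ClimateFeature` bit-flag value and tests capabilities with bitwise AND, including the combined `not flags & (A | B)` form. A small self-contained sketch of that idiom using an illustrative `IntFlag` enum (not the real aioesphomeapi one):

```python
from enum import IntFlag


class Capability(IntFlag):
    """Illustrative capability flags, mirroring the bit-flag style above."""

    CURRENT_TEMPERATURE = 1 << 0
    TARGET_HUMIDITY = 1 << 1
    TWO_POINT_TARGET_TEMPERATURE = 1 << 2


flags = Capability.CURRENT_TEMPERATURE | Capability.TARGET_HUMIDITY

# Single-capability check: a truthy result means the bit is set.
if flags & Capability.CURRENT_TEMPERATURE:
    print("reports current temperature")

# Combined check: "none of these bits set" reads as `not flags & (A | B)`.
if not flags & (
    Capability.TWO_POINT_TARGET_TEMPERATURE | Capability.TARGET_HUMIDITY
):
    print("neither two-point target temperature nor humidity supported")
else:
    print("at least one of the two capabilities is supported")
```

Packing capabilities into one integer lets a single protocol field describe the device instead of a growing list of boolean columns.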
@@ -542,7 +542,16 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
# Check if Z-Wave capabilities are present and start discovery flow
|
||||
next_flow_id: str | None = None
|
||||
if self._device_info.zwave_proxy_feature_flags:
|
||||
# If the zwave_home_id is not set, we don't know if it's a fresh
|
||||
# adapter, or the cable is just unplugged. So only start
|
||||
# the zwave_js config flow automatically if there is a
|
||||
# zwave_home_id present. If it's a fresh adapter, the manager
|
||||
# will handle starting the flow once it gets the home id changed
|
||||
# request from the ESPHome device.
|
||||
if (
|
||||
self._device_info.zwave_proxy_feature_flags
|
||||
and self._device_info.zwave_home_id
|
||||
):
|
||||
assert self._connected_address is not None
|
||||
assert self._port is not None
|
||||
|
||||
@@ -559,7 +568,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
data=ESPHomeServiceInfo(
|
||||
name=self._device_info.name,
|
||||
zwave_home_id=self._device_info.zwave_home_id or None,
|
||||
zwave_home_id=self._device_info.zwave_home_id,
|
||||
ip_address=self._connected_address,
|
||||
port=self._port,
|
||||
noise_psk=self._noise_psk,
|
||||
|
||||
@@ -491,13 +491,30 @@ class RuntimeEntryData:
|
||||
|
||||
assert self.client.connected_address
|
||||
|
||||
# If the device does not have a zwave_home_id, it means
|
||||
# either the Z-Wave controller has never been connected
|
||||
# to the ESPHome device, or the Z-Wave controller has
|
||||
# never been provisioned with a home ID (brand new).
|
||||
# Since we cannot tell the difference, and it could
|
||||
# just be the cable is unplugged we only
|
||||
# automatically start the flow if we have a home ID.
|
||||
if not device_info.zwave_home_id:
|
||||
return
|
||||
|
||||
self.async_create_zwave_js_flow(hass, device_info, device_info.zwave_home_id)
|
||||
|
||||
def async_create_zwave_js_flow(
|
||||
self, hass: HomeAssistant, device_info: DeviceInfo, zwave_home_id: int
|
||||
) -> None:
|
||||
"""Create a zwave_js config flow for a Z-Wave JS Proxy device."""
|
||||
assert self.client.connected_address is not None
|
||||
discovery_flow.async_create_flow(
|
||||
hass,
|
||||
"zwave_js",
|
||||
{"source": config_entries.SOURCE_ESPHOME},
|
||||
ESPHomeServiceInfo(
|
||||
name=device_info.name,
|
||||
zwave_home_id=device_info.zwave_home_id or None,
|
||||
zwave_home_id=zwave_home_id,
|
||||
ip_address=self.client.connected_address,
|
||||
port=self.client.port,
|
||||
noise_psk=self.client.noise_psk,
|
||||
|
||||
@@ -6,6 +6,7 @@ import base64
|
||||
from functools import partial
|
||||
import logging
|
||||
import secrets
|
||||
import struct
|
||||
from typing import TYPE_CHECKING, Any, NamedTuple
|
||||
|
||||
from aioesphomeapi import (
|
||||
@@ -22,6 +23,8 @@ from aioesphomeapi import (
|
||||
RequiresEncryptionAPIError,
|
||||
UserService,
|
||||
UserServiceArgType,
|
||||
ZWaveProxyRequest,
|
||||
ZWaveProxyRequestType,
|
||||
parse_log_message,
|
||||
)
|
||||
from awesomeversion import AwesomeVersion
|
||||
@@ -44,12 +47,18 @@ from homeassistant.core import (
|
||||
State,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.exceptions import (
|
||||
HomeAssistantError,
|
||||
ServiceNotFound,
|
||||
ServiceValidationError,
|
||||
TemplateError,
|
||||
)
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
issue_registry as ir,
|
||||
json,
|
||||
template,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
@@ -84,6 +93,8 @@ from .encryption_key_storage import async_get_encryption_key_storage
|
||||
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
|
||||
|
||||
DEVICE_CONFLICT_ISSUE_FORMAT = "device_conflict-{}"
|
||||
UNPACK_UINT32_BE = struct.Struct(">I").unpack_from
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from aioesphomeapi.api_pb2 import SubscribeLogsResponse # type: ignore[attr-defined] # noqa: I001
|
||||
@@ -268,11 +279,32 @@ class ESPHomeManager:
|
||||
elif self.entry.options.get(
|
||||
CONF_ALLOW_SERVICE_CALLS, DEFAULT_ALLOW_SERVICE_CALLS
|
||||
):
|
||||
hass.async_create_task(
|
||||
hass.services.async_call(
|
||||
domain, service_name, service_data, blocking=True
|
||||
call_id = service.call_id
|
||||
if call_id and service.wants_response:
|
||||
# Service call with response expected
|
||||
self.entry.async_create_task(
|
||||
hass,
|
||||
self._handle_service_call_with_response(
|
||||
domain,
|
||||
service_name,
|
||||
service_data,
|
||||
call_id,
|
||||
service.response_template,
|
||||
),
|
||||
)
|
||||
elif call_id:
|
||||
# Service call without response but needs success/failure notification
|
||||
self.entry.async_create_task(
|
||||
hass,
|
||||
self._handle_service_call_with_notification(
|
||||
domain, service_name, service_data, call_id
|
||||
),
|
||||
)
|
||||
else:
|
||||
# Fire and forget service call
|
||||
self.entry.async_create_task(
|
||||
hass, hass.services.async_call(domain, service_name, service_data)
|
||||
)
|
||||
)
|
||||
else:
|
||||
device_info = self.entry_data.device_info
|
||||
assert device_info is not None
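The hunk above routes an incoming ESPHome action request three ways, based on the service.call_id and service.wants_response fields. A standalone sketch of that decision, using plain arguments instead of the aioesphomeapi service object (an assumption made purely for illustration):

def pick_handler(call_id: int, wants_response: bool) -> str:
    """Mirror the dispatch above: response, notification-only, or fire-and-forget."""
    if call_id and wants_response:
        return "handle_service_call_with_response"
    if call_id:
        return "handle_service_call_with_notification"
    return "fire_and_forget"

assert pick_handler(7, True) == "handle_service_call_with_response"
assert pick_handler(7, False) == "handle_service_call_with_notification"
assert pick_handler(0, False) == "fire_and_forget"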
|
||||
@@ -298,6 +330,98 @@ class ESPHomeManager:
|
||||
service_data,
|
||||
)
|
||||
|
||||
async def _handle_service_call_with_response(
|
||||
self,
|
||||
domain: str,
|
||||
service_name: str,
|
||||
service_data: dict,
|
||||
call_id: int,
|
||||
response_template: str | None = None,
|
||||
) -> None:
|
||||
"""Handle service call that expects a response and send response back to ESPHome."""
|
||||
try:
|
||||
# Call the service with response capture enabled
|
||||
action_response = await self.hass.services.async_call(
|
||||
domain=domain,
|
||||
service=service_name,
|
||||
service_data=service_data,
|
||||
blocking=True,
|
||||
return_response=True,
|
||||
)
|
||||
|
||||
if response_template:
|
||||
try:
|
||||
# Render response template
|
||||
tmpl = Template(response_template, self.hass)
|
||||
response = tmpl.async_render(
|
||||
variables={"response": action_response},
|
||||
strict=True,
|
||||
)
|
||||
response_dict = {"response": response}
|
||||
|
||||
except TemplateError as ex:
|
||||
raise HomeAssistantError(
|
||||
f"Error rendering response template: {ex}"
|
||||
) from ex
|
||||
else:
|
||||
response_dict = {"response": action_response}
|
||||
|
||||
# JSON encode response data for ESPHome
|
||||
response_data = json.json_bytes(response_dict)
|
||||
|
||||
except (
|
||||
ServiceNotFound,
|
||||
ServiceValidationError,
|
||||
vol.Invalid,
|
||||
HomeAssistantError,
|
||||
) as ex:
|
||||
self._send_service_call_response(
|
||||
call_id, success=False, error_message=str(ex), response_data=b""
|
||||
)
|
||||
|
||||
else:
|
||||
# Send success response back to ESPHome
|
||||
self._send_service_call_response(
|
||||
call_id=call_id,
|
||||
success=True,
|
||||
error_message="",
|
||||
response_data=response_data,
|
||||
)
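A minimal sketch of the payload shape built above, using the standard-library json module as a stand-in for Home Assistant's json.json_bytes helper; the action result shown is hypothetical:

import json

action_response = {"weather.home": {"temperature": 21.5}}  # hypothetical service result
response_dict = {"response": action_response}
response_data = json.dumps(response_dict).encode()  # stand-in for json.json_bytes()
print(response_data)  # b'{"response": {"weather.home": {"temperature": 21.5}}}'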
|
||||
|
||||
async def _handle_service_call_with_notification(
|
||||
self, domain: str, service_name: str, service_data: dict, call_id: int
|
||||
) -> None:
|
||||
"""Handle service call that needs success/failure notification."""
|
||||
try:
|
||||
await self.hass.services.async_call(
|
||||
domain, service_name, service_data, blocking=True
|
||||
)
|
||||
except (ServiceNotFound, ServiceValidationError, vol.Invalid) as ex:
|
||||
self._send_service_call_response(call_id, False, str(ex), b"")
|
||||
else:
|
||||
self._send_service_call_response(call_id, True, "", b"")
|
||||
|
||||
def _send_service_call_response(
|
||||
self,
|
||||
call_id: int,
|
||||
success: bool,
|
||||
error_message: str,
|
||||
response_data: bytes,
|
||||
) -> None:
|
||||
"""Send service call response back to ESPHome device."""
|
||||
_LOGGER.debug(
|
||||
"Service call response for call_id %s: success=%s, error=%s",
|
||||
call_id,
|
||||
success,
|
||||
error_message,
|
||||
)
|
||||
self.cli.send_homeassistant_action_response(
|
||||
call_id,
|
||||
success,
|
||||
error_message,
|
||||
response_data,
|
||||
)
|
||||
|
||||
@callback
|
||||
def _send_home_assistant_state(
|
||||
self, entity_id: str, attribute: str | None, state: State | None
|
||||
@@ -557,6 +681,11 @@ class ESPHomeManager:
|
||||
)
|
||||
entry_data.loaded_platforms.add(Platform.ASSIST_SATELLITE)
|
||||
|
||||
if device_info.zwave_proxy_feature_flags:
|
||||
entry_data.disconnect_callbacks.add(
|
||||
cli.subscribe_zwave_proxy_request(self._async_zwave_proxy_request)
|
||||
)
|
||||
|
||||
cli.subscribe_home_assistant_states_and_services(
|
||||
on_state=entry_data.async_update_state,
|
||||
on_service_call=self.async_on_service_call,
|
||||
@@ -568,6 +697,25 @@ class ESPHomeManager:
|
||||
_async_check_firmware_version(hass, device_info, api_version)
|
||||
_async_check_using_api_password(hass, device_info, bool(self.password))
|
||||
|
||||
def _async_zwave_proxy_request(self, request: ZWaveProxyRequest) -> None:
|
||||
"""Handle a request to create a zwave_js config flow."""
|
||||
if request.type != ZWaveProxyRequestType.HOME_ID_CHANGE:
|
||||
return
|
||||
# ESPHome will send a home id change on every connection
|
||||
# if the Z-Wave controller is connected to the ESPHome device
|
||||
# so we know for sure that the Z-Wave controller is connected
|
||||
# when we get the message. This makes it safe to start
|
||||
# the zwave_js config flow automatically even if the zwave_home_id
|
||||
# is 0 (not yet provisioned) as we know for sure the controller
|
||||
# is connected to the ESPHome device and do not have to guess
|
||||
# if it's a broken connection or Z-Wave controller or a not
|
||||
# yet provisioned controller.
|
||||
zwave_home_id: int = UNPACK_UINT32_BE(request.data[0:4])[0]
|
||||
assert self.entry_data.device_info is not None
|
||||
self.entry_data.async_create_zwave_js_flow(
|
||||
self.hass, self.entry_data.device_info, zwave_home_id
|
||||
)
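A standalone sketch of the home-id decoding above: the first four bytes of the proxy request payload are read as a big-endian unsigned 32-bit integer. The payload bytes below are made up for illustration:

import struct

UNPACK_UINT32_BE = struct.Struct(">I").unpack_from

payload = bytes([0xD1, 0x5C, 0x3B, 0x7F])  # hypothetical request.data
zwave_home_id = UNPACK_UINT32_BE(payload[0:4])[0]
print(hex(zwave_home_id))  # 0xd15c3b7f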
|
||||
|
||||
async def on_disconnect(self, expected_disconnect: bool) -> None:
|
||||
"""Run disconnect callbacks on API disconnect."""
|
||||
entry_data = self.entry_data
|
||||
|
||||
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==42.0.0",
+    "aioesphomeapi==42.2.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
|
||||
@@ -180,9 +180,7 @@ class EventEntity(RestoreEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_)
|
||||
@property
|
||||
def state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
attributes: dict[str, Any] = self.generate_entity_state_attributes()
|
||||
|
||||
attributes[ATTR_EVENT_TYPE] = self.__last_event_type
|
||||
attributes = {ATTR_EVENT_TYPE: self.__last_event_type}
|
||||
if last_event_attributes := self.__last_event_attributes:
|
||||
attributes |= last_event_attributes
|
||||
return attributes
|
||||
|
||||
@@ -385,10 +385,9 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
@final
|
||||
@property
|
||||
def state_attributes(self) -> dict[str, Any]:
|
||||
def state_attributes(self) -> dict[str, float | str | None]:
|
||||
"""Return optional state attributes."""
|
||||
data: dict[str, Any] = self.generate_entity_state_attributes()
|
||||
|
||||
data: dict[str, float | str | None] = {}
|
||||
supported_features = self.supported_features
|
||||
|
||||
if FanEntityFeature.DIRECTION in supported_features:
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@mib1185"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/feedreader",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["feedparser", "sgmllib3k"],
|
||||
"requirements": ["feedparser==6.0.12"]
|
||||
|
||||
homeassistant/components/firefly_iii/diagnostics.py (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
"""Diagnostics for the Firefly III integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import FireflyConfigEntry
|
||||
from .coordinator import FireflyDataUpdateCoordinator
|
||||
|
||||
TO_REDACT = [CONF_API_KEY, CONF_URL]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: FireflyConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
coordinator: FireflyDataUpdateCoordinator = entry.runtime_data
|
||||
|
||||
return {
|
||||
"config_entry": async_redact_data(entry.as_dict(), TO_REDACT),
|
||||
"data": {"primary_currency": coordinator.data.primary_currency.to_dict()},
|
||||
}
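A small sketch of what the redaction above does to the config-entry dump. async_redact_data is the existing diagnostics helper that replaces the listed keys with a redaction marker; the sample dictionary and values are hypothetical:

from homeassistant.components.diagnostics import async_redact_data

sample = {"api_key": "secret-token", "url": "https://firefly.example", "verify_ssl": True}
print(async_redact_data(sample, ["api_key", "url"]))
# {'api_key': '**REDACTED**', 'url': '**REDACTED**', 'verify_ssl': True}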
|
||||
@@ -111,7 +111,12 @@ class FlumeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors[CONF_PASSWORD] = "invalid_auth"
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=DATA_SCHEMA, errors=errors
|
||||
step_id="user",
|
||||
data_schema=DATA_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"api_url": "https://portal.flumetech.com/settings#token"
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "In order to access the Flume Personal API, you will need to request a 'Client ID' and 'Client Secret' at https://portal.flumetech.com/settings#token",
|
||||
"description": "In order to access the Flume Personal API, you will need to request a 'Client ID' and 'Client Secret' at {api_url}",
|
||||
"title": "Connect to your Flume account",
|
||||
"data": {
|
||||
"username": "[%key:common::config_flow::data::username%]",
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.helpers import aiohttp_client
|
||||
from .const import DOMAIN
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str})
|
||||
API_KEY_URL = "https://freedompro.eu/"
|
||||
|
||||
|
||||
class Hub:
|
||||
@@ -53,7 +54,11 @@ class FreedomProConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Show the setup form to the user."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
description_placeholders={
|
||||
"api_key_url": API_KEY_URL,
|
||||
},
|
||||
)
|
||||
|
||||
errors = {}
|
||||
@@ -68,7 +73,12 @@ class FreedomProConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_create_entry(title="Freedompro", data=user_input)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"api_key_url": API_KEY_URL,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"data": {
|
||||
"api_key": "[%key:common::config_flow::data::api_key%]"
|
||||
},
|
||||
"description": "Please enter the API key obtained from https://home.freedompro.eu",
|
||||
"description": "Please enter the API key obtained from {api_key_url}",
|
||||
"title": "Freedompro API key"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable, Mapping
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
@@ -16,6 +17,7 @@ from fritzconnection.core.exceptions import (
|
||||
FritzConnectionException,
|
||||
FritzSecurityError,
|
||||
)
|
||||
from fritzconnection.lib.fritzcall import FritzCall
|
||||
from fritzconnection.lib.fritzhosts import FritzHosts
|
||||
from fritzconnection.lib.fritzstatus import FritzStatus
|
||||
from fritzconnection.lib.fritzwlan import FritzGuestWLAN
|
||||
@@ -120,6 +122,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
self.fritz_guest_wifi: FritzGuestWLAN = None
|
||||
self.fritz_hosts: FritzHosts = None
|
||||
self.fritz_status: FritzStatus = None
|
||||
self.fritz_call: FritzCall = None
|
||||
self.host = host
|
||||
self.mesh_role = MeshRoles.NONE
|
||||
self.mesh_wifi_uplink = False
|
||||
@@ -183,6 +186,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
self.fritz_hosts = FritzHosts(fc=self.connection)
|
||||
self.fritz_guest_wifi = FritzGuestWLAN(fc=self.connection)
|
||||
self.fritz_status = FritzStatus(fc=self.connection)
|
||||
self.fritz_call = FritzCall(fc=self.connection)
|
||||
info = self.fritz_status.get_device_info()
|
||||
|
||||
_LOGGER.debug(
|
||||
@@ -617,6 +621,14 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
|
||||
self.fritz_guest_wifi.set_password, password, length
|
||||
)
|
||||
|
||||
async def async_trigger_dial(self, number: str, max_ring_seconds: int) -> None:
|
||||
"""Trigger service to dial a number."""
|
||||
try:
|
||||
await self.hass.async_add_executor_job(self.fritz_call.dial, number)
|
||||
await asyncio.sleep(max_ring_seconds)
|
||||
finally:
|
||||
await self.hass.async_add_executor_job(self.fritz_call.hangup)
|
||||
|
||||
async def async_trigger_cleanup(self) -> None:
|
||||
"""Trigger device trackers cleanup."""
|
||||
_LOGGER.debug("Device tracker cleanup triggered")
|
||||
|
||||
@@ -62,6 +62,9 @@
|
||||
},
|
||||
"set_guest_wifi_password": {
|
||||
"service": "mdi:form-textbox-password"
|
||||
},
|
||||
"dial": {
|
||||
"service": "mdi:phone-dial"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"config_flow": true,
|
||||
"dependencies": ["network"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/fritz",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["fritzconnection"],
|
||||
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==0.13.0"],
|
||||
|
||||
@@ -4,6 +4,7 @@ import logging
|
||||
|
||||
from fritzconnection.core.exceptions import (
|
||||
FritzActionError,
|
||||
FritzActionFailedError,
|
||||
FritzConnectionException,
|
||||
FritzServiceError,
|
||||
)
|
||||
@@ -27,6 +28,14 @@ SERVICE_SCHEMA_SET_GUEST_WIFI_PW = vol.Schema(
|
||||
vol.Optional("length"): vol.Range(min=8, max=63),
|
||||
}
|
||||
)
|
||||
SERVICE_DIAL = "dial"
|
||||
SERVICE_SCHEMA_DIAL = vol.Schema(
|
||||
{
|
||||
vol.Required("device_id"): str,
|
||||
vol.Required("number"): str,
|
||||
vol.Required("max_ring_seconds"): vol.Range(min=1, max=300),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
|
||||
@@ -65,6 +74,46 @@ async def _async_set_guest_wifi_password(service_call: ServiceCall) -> None:
|
||||
) from ex
|
||||
|
||||
|
||||
async def _async_dial(service_call: ServiceCall) -> None:
|
||||
"""Call Fritz dial service."""
|
||||
target_entry_ids = await async_extract_config_entry_ids(service_call)
|
||||
target_entries: list[FritzConfigEntry] = [
|
||||
loaded_entry
|
||||
for loaded_entry in service_call.hass.config_entries.async_loaded_entries(
|
||||
DOMAIN
|
||||
)
|
||||
if loaded_entry.entry_id in target_entry_ids
|
||||
]
|
||||
|
||||
if not target_entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="config_entry_not_found",
|
||||
translation_placeholders={"service": service_call.service},
|
||||
)
|
||||
|
||||
for target_entry in target_entries:
|
||||
_LOGGER.debug("Executing service %s", service_call.service)
|
||||
avm_wrapper = target_entry.runtime_data
|
||||
try:
|
||||
await avm_wrapper.async_trigger_dial(
|
||||
service_call.data["number"],
|
||||
max_ring_seconds=service_call.data["max_ring_seconds"],
|
||||
)
|
||||
except (FritzServiceError, FritzActionError) as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="service_parameter_unknown"
|
||||
) from ex
|
||||
except FritzActionFailedError as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="service_dial_failed"
|
||||
) from ex
|
||||
except FritzConnectionException as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="service_not_supported"
|
||||
) from ex
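For reference, a hedged sketch of invoking the new dial action from Python code running inside Home Assistant; the device id and number are placeholders, and the field names follow the service schema above:

from homeassistant.core import HomeAssistant

async def ring_handset(hass: HomeAssistant) -> None:
    """Hypothetical helper that asks a FRITZ!Box to dial a number."""
    await hass.services.async_call(
        "fritz",
        "dial",
        {
            "device_id": "0123456789abcdef",  # placeholder FRITZ!Box device id
            "number": "**610",                # placeholder number to dial
            "max_ring_seconds": 15,
        },
        blocking=True,
    )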
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Set up services for Fritz integration."""
|
||||
@@ -75,3 +124,4 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
_async_set_guest_wifi_password,
|
||||
SERVICE_SCHEMA_SET_GUEST_WIFI_PW,
|
||||
)
|
||||
hass.services.async_register(DOMAIN, SERVICE_DIAL, _async_dial, SERVICE_SCHEMA_DIAL)
|
||||
|
||||
@@ -17,3 +17,24 @@ set_guest_wifi_password:
|
||||
number:
|
||||
min: 8
|
||||
max: 63
|
||||
dial:
|
||||
fields:
|
||||
device_id:
|
||||
required: true
|
||||
selector:
|
||||
device:
|
||||
integration: fritz
|
||||
entity:
|
||||
device_class: connectivity
|
||||
number:
|
||||
required: true
|
||||
selector:
|
||||
text:
|
||||
max_ring_seconds:
|
||||
default: 15
|
||||
required: true
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 300
|
||||
unit_of_measurement: seconds
|
||||
|
||||
@@ -198,12 +198,33 @@
|
||||
"description": "Length of the new password. It will be auto-generated if no password is set."
|
||||
}
|
||||
}
|
||||
},
|
||||
"dial": {
|
||||
"name": "Dial a phone number",
|
||||
"description": "Makes the FRITZ!Box dial a phone number.",
|
||||
"fields": {
|
||||
"device_id": {
|
||||
"name": "FRITZ!Box device",
|
||||
"description": "Select the FRITZ!Box to dial from."
|
||||
},
|
||||
"number": {
|
||||
"name": "Phone number",
|
||||
"description": "The phone number to dial."
|
||||
},
|
||||
"max_ring_seconds": {
|
||||
"name": "Maximum ring duration",
|
||||
"description": "The maximum number of seconds to ring after dialing."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"config_entry_not_found": {
|
||||
"message": "Failed to perform action \"{service}\". Config entry for target not found"
|
||||
},
|
||||
"service_dial_failed": {
|
||||
"message": "Failed to dial, check if the click to dial service of the FRITZ!Box is activated"
|
||||
},
|
||||
"service_parameter_unknown": {
|
||||
"message": "Action or parameter unknown"
|
||||
},
|
||||
|
||||
@@ -128,7 +128,7 @@ class GardenaBluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self._abort_if_unique_id_configured()
|
||||
return await self.async_step_confirm()
|
||||
|
||||
current_addresses = self._async_current_ids()
|
||||
current_addresses = self._async_current_ids(include_ignore=False)
|
||||
for discovery_info in async_discovered_service_info(self.hass):
|
||||
address = discovery_info.address
|
||||
if address in current_addresses or not _is_supported(discovery_info):
|
||||
|
||||
@@ -101,9 +101,7 @@ class GeolocationEvent(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
@property
|
||||
def state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of this external event."""
|
||||
data: dict[str, Any] = self.generate_entity_state_attributes()
|
||||
|
||||
data[ATTR_SOURCE] = self.source
|
||||
data: dict[str, Any] = {ATTR_SOURCE: self.source}
|
||||
if self.latitude is not None:
|
||||
data[ATTR_LATITUDE] = round(self.latitude, 5)
|
||||
if self.longitude is not None:
|
||||
|
||||
@@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui"
 DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
 HA_MANAGED_API_PORT = 11984
 HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
-RECOMMENDED_VERSION = "1.9.9"
+RECOMMENDED_VERSION = "1.9.11"
|
||||
|
||||
@@ -186,6 +186,7 @@ async def async_setup_entry(
|
||||
class InverterSensor(CoordinatorEntity[GoodweUpdateCoordinator], SensorEntity):
|
||||
"""Entity representing individual inverter sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
entity_description: GoodweSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
|
||||
@@ -59,6 +59,7 @@ class GoogleGenerativeAITextToSpeechEntity(
|
||||
"en-US",
|
||||
"es-US",
|
||||
"fr-FR",
|
||||
"he-IL",
|
||||
"hi-IN",
|
||||
"id-ID",
|
||||
"it-IT",
|
||||
|
||||
@@ -72,6 +72,7 @@ PLATFORMS = [
|
||||
Platform.NOTIFY,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
Platform.VALVE,
|
||||
]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -35,6 +35,7 @@ from .media_player import MediaPlayerGroup, async_create_preview_media_player
|
||||
from .notify import async_create_preview_notify
|
||||
from .sensor import async_create_preview_sensor
|
||||
from .switch import async_create_preview_switch
|
||||
from .valve import async_create_preview_valve
|
||||
|
||||
_STATISTIC_MEASURES = [
|
||||
"last",
|
||||
@@ -172,6 +173,7 @@ GROUP_TYPES = [
|
||||
"notify",
|
||||
"sensor",
|
||||
"switch",
|
||||
"valve",
|
||||
]
|
||||
|
||||
|
||||
@@ -253,6 +255,11 @@ CONFIG_FLOW = {
|
||||
preview="group",
|
||||
validate_user_input=set_group_type("switch"),
|
||||
),
|
||||
"valve": SchemaFlowFormStep(
|
||||
basic_group_config_schema("valve"),
|
||||
preview="group",
|
||||
validate_user_input=set_group_type("valve"),
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@@ -302,6 +309,10 @@ OPTIONS_FLOW = {
|
||||
partial(light_switch_options_schema, "switch"),
|
||||
preview="group",
|
||||
),
|
||||
"valve": SchemaFlowFormStep(
|
||||
partial(basic_group_options_schema, "valve"),
|
||||
preview="group",
|
||||
),
|
||||
}
|
||||
|
||||
PREVIEW_OPTIONS_SCHEMA: dict[str, vol.Schema] = {}
|
||||
@@ -321,6 +332,7 @@ CREATE_PREVIEW_ENTITY: dict[
|
||||
"notify": async_create_preview_notify,
|
||||
"sensor": async_create_preview_sensor,
|
||||
"switch": async_create_preview_switch,
|
||||
"valve": async_create_preview_valve,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -55,7 +55,7 @@ from homeassistant.helpers.entity_platform import (
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .entity import GroupEntity
|
||||
from .util import find_state_attributes, mean_tuple, reduce_attribute
|
||||
from .util import find_state_attributes, mean_circle, mean_tuple, reduce_attribute
|
||||
|
||||
DEFAULT_NAME = "Light Group"
|
||||
CONF_ALL = "all"
|
||||
@@ -229,7 +229,7 @@ class LightGroup(GroupEntity, LightEntity):
|
||||
self._attr_brightness = reduce_attribute(on_states, ATTR_BRIGHTNESS)
|
||||
|
||||
self._attr_hs_color = reduce_attribute(
|
||||
on_states, ATTR_HS_COLOR, reduce=mean_tuple
|
||||
on_states, ATTR_HS_COLOR, reduce=mean_circle
|
||||
)
|
||||
self._attr_rgb_color = reduce_attribute(
|
||||
on_states, ATTR_RGB_COLOR, reduce=mean_tuple
|
||||
|
||||
@@ -16,7 +16,8 @@
|
||||
"media_player": "Media player group",
|
||||
"notify": "Notify group",
|
||||
"sensor": "Sensor group",
|
||||
"switch": "Switch group"
|
||||
"switch": "Switch group",
|
||||
"valve": "Valve group"
|
||||
}
|
||||
},
|
||||
"binary_sensor": {
|
||||
@@ -127,6 +128,18 @@
|
||||
"data_description": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
|
||||
}
|
||||
},
|
||||
"valve": {
|
||||
"title": "[%key:component::group::config::step::user::title%]",
|
||||
"data": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
|
||||
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
|
||||
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
},
|
||||
"data_description": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -212,6 +225,16 @@
|
||||
"data_description": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
|
||||
}
|
||||
},
|
||||
"valve": {
|
||||
"data": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data::all%]",
|
||||
"entities": "[%key:component::group::config::step::binary_sensor::data::entities%]",
|
||||
"hide_members": "[%key:component::group::config::step::binary_sensor::data::hide_members%]"
|
||||
},
|
||||
"data_description": {
|
||||
"all": "[%key:component::group::config::step::binary_sensor::data_description::all%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Iterator
|
||||
from itertools import groupby
|
||||
from math import atan2, cos, degrees, radians, sin
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import State
|
||||
@@ -32,6 +33,23 @@ def mean_tuple(*args: Any) -> tuple[float | Any, ...]:
|
||||
return tuple(sum(x) / len(x) for x in zip(*args, strict=False))
|
||||
|
||||
|
||||
def mean_circle(*args: Any) -> tuple[float | Any, ...]:
|
||||
"""Return the circular mean of hue values and arithmetic mean of saturation values from HS color tuples."""
|
||||
if not args:
|
||||
return ()
|
||||
|
||||
hues, saturations = zip(*args, strict=False)
|
||||
|
||||
sum_x = sum(cos(radians(h)) for h in hues)
|
||||
sum_y = sum(sin(radians(h)) for h in hues)
|
||||
|
||||
mean_angle = degrees(atan2(sum_y, sum_x)) % 360
|
||||
|
||||
saturation = sum(saturations) / len(saturations)
|
||||
|
||||
return (mean_angle, saturation)
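Why a circular mean matters for hues: 350° and 10° sit only 20° apart on the color wheel, so their average should wrap to 0°, not the arithmetic 180°. A condensed standalone copy of the helper above (empty-input guard omitted) with a worked example; the HS tuples are made up:

from math import atan2, cos, degrees, radians, sin

def mean_circle(*args):
    """Circular mean of hues, arithmetic mean of saturations (mirrors the helper above)."""
    hues, saturations = zip(*args, strict=False)
    sum_x = sum(cos(radians(h)) for h in hues)
    sum_y = sum(sin(radians(h)) for h in hues)
    mean_angle = degrees(atan2(sum_y, sum_x)) % 360
    return (mean_angle, sum(saturations) / len(saturations))

print(mean_circle((350.0, 60.0), (10.0, 40.0)))  # hue is approximately 0 (mod 360), saturation 50.0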
|
||||
|
||||
|
||||
def attribute_equal(states: list[State], key: str) -> bool:
|
||||
"""Return True if all attributes found matching key from states are equal.
|
||||
|
||||
|
||||
homeassistant/components/group/valve.py (new file, 262 lines)
@@ -0,0 +1,262 @@
|
||||
"""Platform allowing several valves to be grouped into one valve."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.valve import (
|
||||
ATTR_CURRENT_POSITION,
|
||||
ATTR_POSITION,
|
||||
DOMAIN as VALVE_DOMAIN,
|
||||
PLATFORM_SCHEMA as VALVE_PLATFORM_SCHEMA,
|
||||
ValveEntity,
|
||||
ValveEntityFeature,
|
||||
ValveState,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_SUPPORTED_FEATURES,
|
||||
CONF_ENTITIES,
|
||||
CONF_NAME,
|
||||
CONF_UNIQUE_ID,
|
||||
SERVICE_CLOSE_VALVE,
|
||||
SERVICE_OPEN_VALVE,
|
||||
SERVICE_SET_VALVE_POSITION,
|
||||
SERVICE_STOP_VALVE,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, State, callback
|
||||
from homeassistant.helpers import config_validation as cv, entity_registry as er
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
AddEntitiesCallback,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .entity import GroupEntity
|
||||
from .util import reduce_attribute
|
||||
|
||||
KEY_OPEN_CLOSE = "open_close"
|
||||
KEY_STOP = "stop"
|
||||
KEY_SET_POSITION = "set_position"
|
||||
|
||||
DEFAULT_NAME = "Valve Group"
|
||||
|
||||
# No limit on parallel updates to enable a group calling another group
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
PLATFORM_SCHEMA = VALVE_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ENTITIES): cv.entities_domain(VALVE_DOMAIN),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Valve Group platform."""
|
||||
async_add_entities(
|
||||
[
|
||||
ValveGroup(
|
||||
config.get(CONF_UNIQUE_ID), config[CONF_NAME], config[CONF_ENTITIES]
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Initialize Valve Group config entry."""
|
||||
registry = er.async_get(hass)
|
||||
entities = er.async_validate_entity_ids(
|
||||
registry, config_entry.options[CONF_ENTITIES]
|
||||
)
|
||||
|
||||
async_add_entities(
|
||||
[ValveGroup(config_entry.entry_id, config_entry.title, entities)]
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_create_preview_valve(
|
||||
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
|
||||
) -> ValveGroup:
|
||||
"""Create a preview valve."""
|
||||
return ValveGroup(
|
||||
None,
|
||||
name,
|
||||
validated_config[CONF_ENTITIES],
|
||||
)
|
||||
|
||||
|
||||
class ValveGroup(GroupEntity, ValveEntity):
|
||||
"""Representation of a ValveGroup."""
|
||||
|
||||
_attr_available: bool = False
|
||||
_attr_current_valve_position: int | None = None
|
||||
_attr_is_closed: bool | None = None
|
||||
_attr_is_closing: bool | None = False
|
||||
_attr_is_opening: bool | None = False
|
||||
_attr_reports_position: bool = False
|
||||
|
||||
def __init__(self, unique_id: str | None, name: str, entities: list[str]) -> None:
|
||||
"""Initialize a ValveGroup entity."""
|
||||
self._entity_ids = entities
|
||||
self._valves: dict[str, set[str]] = {
|
||||
KEY_OPEN_CLOSE: set(),
|
||||
KEY_STOP: set(),
|
||||
KEY_SET_POSITION: set(),
|
||||
}
|
||||
|
||||
self._attr_name = name
|
||||
self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entities}
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
@callback
|
||||
def async_update_supported_features(
|
||||
self,
|
||||
entity_id: str,
|
||||
new_state: State | None,
|
||||
) -> None:
|
||||
"""Update dictionaries with supported features."""
|
||||
if not new_state:
|
||||
for values in self._valves.values():
|
||||
values.discard(entity_id)
|
||||
return
|
||||
|
||||
features = new_state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
|
||||
|
||||
if features & (ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE):
|
||||
self._valves[KEY_OPEN_CLOSE].add(entity_id)
|
||||
else:
|
||||
self._valves[KEY_OPEN_CLOSE].discard(entity_id)
|
||||
if features & (ValveEntityFeature.STOP):
|
||||
self._valves[KEY_STOP].add(entity_id)
|
||||
else:
|
||||
self._valves[KEY_STOP].discard(entity_id)
|
||||
if features & (ValveEntityFeature.SET_POSITION):
|
||||
self._valves[KEY_SET_POSITION].add(entity_id)
|
||||
else:
|
||||
self._valves[KEY_SET_POSITION].discard(entity_id)
|
||||
|
||||
async def async_open_valve(self) -> None:
|
||||
"""Open the valves."""
|
||||
data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
|
||||
await self.hass.services.async_call(
|
||||
VALVE_DOMAIN, SERVICE_OPEN_VALVE, data, blocking=True, context=self._context
|
||||
)
|
||||
|
||||
async def async_handle_open_valve(self) -> None: # type: ignore[misc]
|
||||
"""Open the valves.
|
||||
|
||||
Override the base class to avoid calling the set position service
|
||||
for all valves. Transfer the service call to the base class and let
|
||||
it decide if the valve uses set position or open service.
|
||||
"""
|
||||
await self.async_open_valve()
|
||||
|
||||
async def async_close_valve(self) -> None:
|
||||
"""Close valves."""
|
||||
data = {ATTR_ENTITY_ID: self._valves[KEY_OPEN_CLOSE]}
|
||||
await self.hass.services.async_call(
|
||||
VALVE_DOMAIN,
|
||||
SERVICE_CLOSE_VALVE,
|
||||
data,
|
||||
blocking=True,
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
async def async_handle_close_valve(self) -> None: # type: ignore[misc]
|
||||
"""Close the valves.
|
||||
|
||||
Override the base class to avoid calling the set position service
|
||||
for all valves. Transfer the service call to the base class and let
|
||||
it decide if the valve uses set position or close service.
|
||||
"""
|
||||
await self.async_close_valve()
|
||||
|
||||
async def async_set_valve_position(self, position: int) -> None:
|
||||
"""Move the valves to a specific position."""
|
||||
data = {
|
||||
ATTR_ENTITY_ID: self._valves[KEY_SET_POSITION],
|
||||
ATTR_POSITION: position,
|
||||
}
|
||||
await self.hass.services.async_call(
|
||||
VALVE_DOMAIN,
|
||||
SERVICE_SET_VALVE_POSITION,
|
||||
data,
|
||||
blocking=True,
|
||||
context=self._context,
|
||||
)
|
||||
|
||||
async def async_stop_valve(self) -> None:
|
||||
"""Stop the valves."""
|
||||
data = {ATTR_ENTITY_ID: self._valves[KEY_STOP]}
|
||||
await self.hass.services.async_call(
|
||||
VALVE_DOMAIN, SERVICE_STOP_VALVE, data, blocking=True, context=self._context
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_update_group_state(self) -> None:
|
||||
"""Update state and attributes."""
|
||||
states = [
|
||||
state
|
||||
for entity_id in self._entity_ids
|
||||
if (state := self.hass.states.get(entity_id)) is not None
|
||||
]
|
||||
|
||||
# Set group as unavailable if all members are unavailable or missing
|
||||
self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states)
|
||||
|
||||
self._attr_is_closed = True
|
||||
self._attr_is_closing = False
|
||||
self._attr_is_opening = False
|
||||
self._attr_reports_position = False
|
||||
self._update_assumed_state_from_members()
|
||||
for state in states:
|
||||
if state.attributes.get(ATTR_CURRENT_POSITION) is not None:
|
||||
self._attr_reports_position = True
|
||||
if state.state == ValveState.OPEN:
|
||||
self._attr_is_closed = False
|
||||
continue
|
||||
if state.state == ValveState.CLOSED:
|
||||
continue
|
||||
if state.state == ValveState.CLOSING:
|
||||
self._attr_is_closing = True
|
||||
continue
|
||||
if state.state == ValveState.OPENING:
|
||||
self._attr_is_opening = True
|
||||
continue
|
||||
|
||||
valid_state = any(
|
||||
state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) for state in states
|
||||
)
|
||||
if not valid_state:
|
||||
# Set as unknown if all members are unknown or unavailable
|
||||
self._attr_is_closed = None
|
||||
|
||||
self._attr_current_valve_position = reduce_attribute(
|
||||
states, ATTR_CURRENT_POSITION
|
||||
)
|
||||
|
||||
supported_features = ValveEntityFeature(0)
|
||||
if self._valves[KEY_OPEN_CLOSE]:
|
||||
supported_features |= ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
|
||||
if self._valves[KEY_STOP]:
|
||||
supported_features |= ValveEntityFeature.STOP
|
||||
if self._valves[KEY_SET_POSITION]:
|
||||
supported_features |= ValveEntityFeature.SET_POSITION
|
||||
self._attr_supported_features = supported_features
|
||||
@@ -72,8 +72,7 @@ async def async_setup_entry(
|
||||
config_entry.runtime_data = coordinator
|
||||
|
||||
party = coordinator.data.user.party.id
|
||||
if HABITICA_KEY not in hass.data:
|
||||
hass.data[HABITICA_KEY] = {}
|
||||
hass.data.setdefault(HABITICA_KEY, {})
|
||||
|
||||
if party is not None and party not in hass.data[HABITICA_KEY]:
|
||||
party_coordinator = HabiticaPartyCoordinator(hass, config_entry, api)
|
||||
@@ -117,9 +116,20 @@ async def async_setup_entry(
|
||||
coordinator.async_add_listener(_party_update_listener)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
config_entry.async_on_unload(
|
||||
config_entry.add_update_listener(_async_update_listener)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def _async_update_listener(
|
||||
hass: HomeAssistant, entry: HabiticaConfigEntry
|
||||
) -> None:
|
||||
"""Handle update."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def shutdown_party_coordinator(hass: HomeAssistant, party_added: UUID) -> None:
|
||||
"""Handle party coordinator shutdown."""
|
||||
await hass.data[HABITICA_KEY][party_added].async_shutdown()
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from uuid import UUID
|
||||
|
||||
from aiohttp import ClientError
|
||||
from habiticalib import (
|
||||
@@ -17,7 +18,14 @@ from habiticalib import (
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_NAME,
|
||||
@@ -26,15 +34,21 @@ from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import HABITICA_KEY
|
||||
from .const import (
|
||||
CONF_API_USER,
|
||||
CONF_PARTY_MEMBER,
|
||||
DEFAULT_URL,
|
||||
DOMAIN,
|
||||
FORGOT_PASSWORD_URL,
|
||||
@@ -374,3 +388,66 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return errors, user.data
|
||||
|
||||
return errors, None
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"party_member": PartyMembersSubentryFlowHandler}
|
||||
|
||||
|
||||
class PartyMembersSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle subentry flow for adding party members."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Subentry user flow."""
|
||||
|
||||
entry: HabiticaConfigEntry = self._get_entry()
|
||||
if entry.state is not ConfigEntryState.LOADED:
|
||||
return self.async_abort(reason="config_entry_disabled")
|
||||
if (party := entry.runtime_data.data.user.party.id) is None:
|
||||
return self.async_abort(reason="not_in_a_party")
|
||||
|
||||
party_members = self.hass.data[HABITICA_KEY][party].data.members
|
||||
|
||||
if user_input is not None:
|
||||
config_entries = self.hass.config_entries.async_entries(DOMAIN)
|
||||
|
||||
for entry in config_entries:
|
||||
if user_input[CONF_PARTY_MEMBER] == entry.unique_id:
|
||||
return self.async_abort(reason="already_configured_as_entry")
|
||||
if user_input[CONF_PARTY_MEMBER] in {
|
||||
subentry.unique_id for subentry in entry.subentries.values()
|
||||
}:
|
||||
return self.async_abort(reason="already_configured")
|
||||
|
||||
return self.async_create_entry(
|
||||
title=party_members[UUID(user_input[CONF_PARTY_MEMBER])].profile.name,
|
||||
data={},
|
||||
unique_id=user_input[CONF_PARTY_MEMBER],
|
||||
)
|
||||
|
||||
options = [
|
||||
SelectOptionDict(
|
||||
value=str(member_id),
|
||||
label=f"{member.profile.name} (@{member.auth.local.username})",
|
||||
)
|
||||
for member_id, member in party_members.items()
|
||||
if member_id != str(entry.runtime_data.data.user.id)
|
||||
and member.profile.name
|
||||
and member.auth.local.username
|
||||
]
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PARTY_MEMBER): SelectSelector(
|
||||
SelectSelectorConfig(options=options)
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from homeassistant.const import APPLICATION_NAME, __version__
|
||||
|
||||
CONF_API_USER = "api_user"
|
||||
CONF_PARTY_MEMBER = "party_member"
|
||||
|
||||
DEFAULT_URL = "https://habitica.com"
|
||||
ASSETS_URL = "https://habitica-assets.s3.amazonaws.com/mobileApp/images/"
|
||||
|
||||
@@ -213,7 +213,9 @@ class HabiticaPartyCoordinator(HabiticaBaseCoordinator[HabiticaPartyData]):
|
||||
party=(await self.habitica.get_group()).data,
|
||||
members={
|
||||
member.id: member
|
||||
for member in (await self.habitica.get_group_members()).data
|
||||
for member in (
|
||||
await self.habitica.get_group_members(public_fields=True)
|
||||
).data
|
||||
if member.id
|
||||
},
|
||||
)
|
||||
|
||||
@@ -3,10 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from habiticalib import ContentData
|
||||
from habiticalib import ContentData, UserData
|
||||
from yarl import URL
|
||||
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
@@ -29,26 +31,84 @@ class HabiticaBase(CoordinatorEntity[HabiticaDataUpdateCoordinator]):
|
||||
self,
|
||||
coordinator: HabiticaDataUpdateCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
subentry: ConfigSubentry | None = None,
|
||||
) -> None:
|
||||
"""Initialize a Habitica entity."""
|
||||
super().__init__(coordinator)
|
||||
if TYPE_CHECKING:
|
||||
assert coordinator.config_entry.unique_id
|
||||
assert self.user
|
||||
self.entity_description = entity_description
|
||||
self._attr_unique_id = (
|
||||
f"{coordinator.config_entry.unique_id}_{entity_description.key}"
|
||||
self.subentry = subentry
|
||||
unique_id = (
|
||||
subentry.unique_id
|
||||
if subentry is not None and subentry.unique_id
|
||||
else coordinator.config_entry.unique_id
|
||||
)
|
||||
|
||||
self._attr_unique_id = f"{unique_id}_{entity_description.key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=NAME,
|
||||
name=coordinator.data.user.profile.name,
|
||||
name=self.user.profile.name,
|
||||
configuration_url=(
|
||||
URL(coordinator.config_entry.data[CONF_URL])
|
||||
/ "profile"
|
||||
/ coordinator.config_entry.unique_id
|
||||
URL(coordinator.config_entry.data[CONF_URL]) / "profile" / unique_id
|
||||
),
|
||||
identifiers={(DOMAIN, coordinator.config_entry.unique_id)},
|
||||
identifiers={(DOMAIN, unique_id)},
|
||||
)
|
||||
|
||||
if subentry:
|
||||
self._attr_device_info.update(
|
||||
DeviceInfo(
|
||||
via_device=(
|
||||
(
|
||||
DOMAIN,
|
||||
f"{coordinator.config_entry.unique_id}_{self.user.party.id}",
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def user(self) -> UserData | None:
|
||||
"""Return the user data."""
|
||||
return self.coordinator.data.user
|
||||
|
||||
|
||||
class HabiticaPartyMemberBase(HabiticaBase):
|
||||
"""Base Habitica party member entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: HabiticaDataUpdateCoordinator,
|
||||
party_coordinator: HabiticaPartyCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
subentry: ConfigSubentry | None = None,
|
||||
) -> None:
|
||||
"""Initialize a Habitica entity."""
|
||||
self.party_coordinator = party_coordinator
|
||||
super().__init__(coordinator, entity_description, subentry)
|
||||
|
||||
@property
|
||||
def user(self) -> UserData | None:
|
||||
"""Return the user data of the party member."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.subentry
|
||||
assert self.subentry.unique_id
|
||||
return self.party_coordinator.data.members.get(UUID(self.subentry.unique_id))
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
|
||||
return super().available and self.user is not None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
self.party_coordinator.async_add_listener(self._handle_coordinator_update)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -3,10 +3,13 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from habiticalib import Avatar, ContentData, extract_avatar
|
||||
|
||||
from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
@@ -18,7 +21,7 @@ from .coordinator import (
|
||||
HabiticaDataUpdateCoordinator,
|
||||
HabiticaPartyCoordinator,
|
||||
)
|
||||
from .entity import HabiticaBase, HabiticaPartyBase
|
||||
from .entity import HabiticaBase, HabiticaPartyBase, HabiticaPartyMemberBase
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
@@ -47,6 +50,22 @@ async def async_setup_entry(
|
||||
hass, party_coordinator, config_entry, coordinator.content
|
||||
)
|
||||
)
|
||||
for subentry_id, subentry in config_entry.subentries.items():
|
||||
if (
|
||||
subentry.unique_id
|
||||
and UUID(subentry.unique_id) in party_coordinator.data.members
|
||||
):
|
||||
async_add_entities(
|
||||
[
|
||||
HabiticaPartyMemberImage(
|
||||
hass,
|
||||
coordinator,
|
||||
party_coordinator,
|
||||
subentry,
|
||||
)
|
||||
],
|
||||
config_subentry_id=subentry_id,
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -66,18 +85,21 @@ class HabiticaImage(HabiticaBase, ImageEntity):
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
coordinator: HabiticaDataUpdateCoordinator,
|
||||
subentry: ConfigSubentry | None = None,
|
||||
) -> None:
|
||||
"""Initialize the image entity."""
|
||||
super().__init__(coordinator, self.entity_description)
|
||||
HabiticaBase.__init__(self, coordinator, self.entity_description, subentry)
|
||||
ImageEntity.__init__(self, hass)
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
self._avatar = extract_avatar(self.coordinator.data.user)
|
||||
if TYPE_CHECKING:
|
||||
assert self.user
|
||||
self._avatar = extract_avatar(self.user)
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Check if equipped gear and other things have changed since last avatar image generation."""
|
||||
|
||||
if self._avatar != self.coordinator.data.user:
|
||||
self._avatar = extract_avatar(self.coordinator.data.user)
|
||||
if self.user is not None and self._avatar != self.user:
|
||||
self._avatar = extract_avatar(self.user)
|
||||
self._attr_image_last_updated = dt_util.utcnow()
|
||||
self._cache = None
|
||||
|
||||
@@ -90,6 +112,24 @@ class HabiticaImage(HabiticaBase, ImageEntity):
|
||||
return self._cache
|
||||
|
||||
|
||||
class HabiticaPartyMemberImage(HabiticaImage, HabiticaPartyMemberBase):
|
||||
"""A Habitica party member image entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
coordinator: HabiticaDataUpdateCoordinator,
|
||||
party_coordinator: HabiticaPartyCoordinator,
|
||||
subentry: ConfigSubentry | None = None,
|
||||
) -> None:
|
||||
"""Initialize the image entity."""
|
||||
|
||||
HabiticaPartyMemberBase.__init__(
|
||||
self, coordinator, party_coordinator, self.entity_description, subentry
|
||||
)
|
||||
super().__init__(hass, coordinator, subentry)
|
||||
|
||||
|
||||
class HabiticaPartyImage(HabiticaPartyBase, ImageEntity):
|
||||
"""A Habitica image entity of a party."""
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"codeowners": ["@tr4nt0r"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/habitica",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["habiticalib"],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -68,8 +68,8 @@ rules:
|
||||
icon-translations: done
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: done
|
||||
comment: Used to inform of deprecated entities and actions.
|
||||
status: exempt
|
||||
comment: Integration has no repairs
|
||||
stale-devices:
|
||||
status: done
|
||||
comment: Party device is remove if stale.
|
||||
|
||||
@@ -8,6 +8,7 @@ from datetime import datetime
|
||||
from enum import StrEnum
|
||||
import logging
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from habiticalib import ContentData, GroupData, HabiticaClass, TaskData, UserData, ha
|
||||
|
||||
@@ -24,7 +25,7 @@ from homeassistant.util import dt as dt_util
|
||||
from . import HABITICA_KEY
|
||||
from .const import ASSETS_URL
|
||||
from .coordinator import HabiticaConfigEntry
|
||||
from .entity import HabiticaBase, HabiticaPartyBase
|
||||
from .entity import HabiticaBase, HabiticaPartyBase, HabiticaPartyMemberBase
|
||||
from .util import (
|
||||
collected_quest_items,
|
||||
get_attribute_points,
|
||||
@@ -118,12 +119,13 @@ class HabiticaSensorEntity(StrEnum):
|
||||
LAST_CHECKIN = "last_checkin"
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
|
||||
SENSOR_DESCRIPTIONS_COMMON: tuple[HabiticaSensorEntityDescription, ...] = (
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.DISPLAY_NAME,
|
||||
translation_key=HabiticaSensorEntity.DISPLAY_NAME,
|
||||
value_fn=lambda user, _: user.profile.name,
|
||||
attributes_fn=lambda user, _: {
|
||||
"username": f"@{user.auth.local.username}",
|
||||
"blurb": user.profile.blurb,
|
||||
"joined": (
|
||||
dt_util.as_local(joined).date()
|
||||
@@ -175,13 +177,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
|
||||
translation_key=HabiticaSensorEntity.LEVEL,
|
||||
value_fn=lambda user, _: user.stats.lvl,
|
||||
),
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.GOLD,
|
||||
translation_key=HabiticaSensorEntity.GOLD,
|
||||
suggested_display_precision=2,
|
||||
value_fn=lambda user, _: user.stats.gp,
|
||||
entity_picture=ha.GP,
|
||||
),
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.CLASS,
|
||||
translation_key=HabiticaSensorEntity.CLASS,
|
||||
@@ -189,21 +184,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
options=[item.value for item in HabiticaClass],
|
||||
),
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.GEMS,
|
||||
translation_key=HabiticaSensorEntity.GEMS,
|
||||
value_fn=lambda user, _: None if (b := user.balance) is None else round(b * 4),
|
||||
suggested_display_precision=0,
|
||||
entity_picture="shop_gem.png",
|
||||
),
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.TRINKETS,
|
||||
translation_key=HabiticaSensorEntity.TRINKETS,
|
||||
value_fn=lambda user, _: user.purchased.plan.consecutive.trinkets,
|
||||
suggested_display_precision=0,
|
||||
native_unit_of_measurement="⧖",
|
||||
entity_picture="notif_subscriber_reward.png",
|
||||
),
|
||||
HabiticaSensorEntityDescription(
|
||||
key=HabiticaSensorEntity.STRENGTH,
|
||||
        translation_key=HabiticaSensorEntity.STRENGTH,
@@ -236,6 +216,40 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
        suggested_display_precision=0,
        native_unit_of_measurement="CON",
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.LAST_CHECKIN,
        translation_key=HabiticaSensorEntity.LAST_CHECKIN,
        value_fn=(
            lambda user, _: dt_util.as_local(last)
            if (last := user.auth.timestamps.loggedin)
            else None
        ),
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
)
SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.GOLD,
        translation_key=HabiticaSensorEntity.GOLD,
        suggested_display_precision=2,
        value_fn=lambda user, _: user.stats.gp,
        entity_picture=ha.GP,
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.GEMS,
        translation_key=HabiticaSensorEntity.GEMS,
        value_fn=lambda user, _: None if (b := user.balance) is None else round(b * 4),
        suggested_display_precision=0,
        entity_picture="shop_gem.png",
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.TRINKETS,
        translation_key=HabiticaSensorEntity.TRINKETS,
        value_fn=lambda user, _: user.purchased.plan.consecutive.trinkets,
        suggested_display_precision=0,
        native_unit_of_measurement="⧖",
        entity_picture="notif_subscriber_reward.png",
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.EGGS_TOTAL,
        translation_key=HabiticaSensorEntity.EGGS_TOTAL,
@@ -286,16 +300,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
        translation_key=HabiticaSensorEntity.PENDING_QUEST_ITEMS,
        value_fn=pending_quest_items,
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.LAST_CHECKIN,
        translation_key=HabiticaSensorEntity.LAST_CHECKIN,
        value_fn=(
            lambda user, _: dt_util.as_local(last)
            if (last := user.auth.timestamps.loggedin)
            else None
        ),
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
)


@@ -389,7 +393,8 @@ async def async_setup_entry(
    coordinator = config_entry.runtime_data

    async_add_entities(
        HabiticaSensor(coordinator, description) for description in SENSOR_DESCRIPTIONS
        HabiticaSensor(coordinator, description)
        for description in SENSOR_DESCRIPTIONS + SENSOR_DESCRIPTIONS_COMMON
    )

    if party := coordinator.data.user.party.id:
@@ -403,6 +408,23 @@ async def async_setup_entry(
            )
            for description in SENSOR_DESCRIPTIONS_PARTY
        )
        for subentry_id, subentry in config_entry.subentries.items():
            if (
                subentry.unique_id
                and UUID(subentry.unique_id) in party_coordinator.data.members
            ):
                async_add_entities(
                    [
                        HabiticaPartyMemberSensor(
                            coordinator,
                            party_coordinator,
                            description,
                            subentry,
                        )
                        for description in SENSOR_DESCRIPTIONS_COMMON
                    ],
                    config_subentry_id=subentry_id,
                )


class HabiticaSensor(HabiticaBase, SensorEntity):
@@ -414,27 +436,33 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
    def native_value(self) -> StateType | datetime:
        """Return the state of the device."""

        return self.entity_description.value_fn(
            self.coordinator.data.user, self.coordinator.content
        return (
            self.entity_description.value_fn(self.user, self.coordinator.content)
            if self.user is not None
            else None
        )

    @property
    def extra_state_attributes(self) -> dict[str, float | None] | None:
        """Return entity specific state attributes."""
        if func := self.entity_description.attributes_fn:
            return func(self.coordinator.data.user, self.coordinator.content)
        if self.user is not None and (func := self.entity_description.attributes_fn):
            return func(self.user, self.coordinator.content)
        return None

    @property
    def entity_picture(self) -> str | None:
        """Return the entity picture to use in the frontend, if any."""
        if self.entity_description.key is HabiticaSensorEntity.CLASS and (
            _class := self.coordinator.data.user.stats.Class
        if (
            self.entity_description.key is HabiticaSensorEntity.CLASS
            and self.user is not None
            and (_class := self.user.stats.Class)
        ):
            return SVG_CLASS[_class]

        if self.entity_description.key is HabiticaSensorEntity.DISPLAY_NAME and (
            img_url := self.coordinator.data.user.profile.imageUrl
        if (
            self.entity_description.key is HabiticaSensorEntity.DISPLAY_NAME
            and self.user is not None
            and (img_url := self.user.profile.imageUrl)
        ):
            return img_url

@@ -448,6 +476,10 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
        return None


class HabiticaPartyMemberSensor(HabiticaSensor, HabiticaPartyMemberBase):
    """Habitica party member sensor."""


class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
    """Habitica party sensor."""


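The hunks above move the descriptions shared by the account owner and party members into their own tuple (SENSOR_DESCRIPTIONS_COMMON), so the same value_fn can drive both HabiticaSensor and HabiticaPartyMemberSensor. A minimal standalone sketch of that pattern, with made-up field names rather than the integration's real classes:

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any


@dataclass(frozen=True)
class SensorDescription:
    """Description shared by 'own user' and 'party member' sensors."""

    key: str
    value_fn: Callable[[Any], Any]  # receives a user-like object


COMMON_DESCRIPTIONS = (
    SensorDescription(key="health", value_fn=lambda user: user["stats"]["hp"]),
    SensorDescription(key="level", value_fn=lambda user: user["stats"]["lvl"]),
)


class UserSensor:
    """Reads values from one user's data, whichever user that is."""

    def __init__(self, user: dict, description: SensorDescription) -> None:
        self.user = user
        self.description = description

    @property
    def native_value(self) -> Any:
        # Guard against missing data, mirroring the `if self.user is not None` checks above.
        return self.description.value_fn(self.user) if self.user else None


own_user = {"stats": {"hp": 42.5, "lvl": 17}}
sensors = [UserSensor(own_user, d) for d in COMMON_DESCRIPTIONS]
print([s.native_value for s in sensors])  # [42.5, 17]
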
@@ -174,6 +174,32 @@
      }
    }
  },
  "config_subentries": {
    "party_member": {
      "step": {
        "user": {
          "title": "Party members",
          "description": "Track the stats of the adventurers in your party.",
          "data": {
            "party_member": "Party member"
          },
          "data_description": {
            "party_member": "Select an adventurer from your party to track health and other stats."
          }
        }
      },
      "initiate_flow": {
        "user": "Add party member"
      },
      "entry_type": "Party member",
      "abort": {
        "already_configured_as_entry": "Already configured as a user. This adventurer cannot be added as a party member.",
        "already_configured": "This adventurer is already configured as a party member in this or another account.",
        "config_entry_disabled": "Cannot add party members when the main account is disabled or not loaded.",
        "not_in_a_party": "You are currently not in a party. You can only add party members when your character is in a party."
      }
    }
  },
  "entity": {
    "binary_sensor": {
      "pending_quest": {
@@ -287,6 +313,9 @@
      },
      "total_logins": {
        "name": "Total logins"
      },
      "username": {
        "name": "[%key:common::config_flow::data::username%]"
      }
    }
  },
@@ -591,12 +620,6 @@
      "message": "Unable to send message, {name} not found. ({reason})"
    }
  },
  "issues": {
    "deprecated_entity": {
      "title": "The Habitica {name} entity is deprecated",
      "description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts, disable `{entity}` and reload the integration/restart Home Assistant to fix this issue."
    }
  },
  "services": {
    "cast_skill": {
      "name": "Cast a skill",

@@ -3,18 +3,15 @@
from __future__ import annotations

import logging
import sys

from homeassistant.components.remote import ATTR_ACTIVITY, ATTR_DELAY_SECS
from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send

if sys.version_info < (3, 14):
    from .const import HARMONY_OPTIONS_UPDATE, PLATFORMS
    from .data import HarmonyConfigEntry, HarmonyData
from .const import HARMONY_OPTIONS_UPDATE, PLATFORMS
from .data import HarmonyConfigEntry, HarmonyData

_LOGGER = logging.getLogger(__name__)

@@ -25,10 +22,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: HarmonyConfigEntry) -> b
    # when setting up a config entry, we fallback to adding
    # the options to the config entry and pull them out here if
    # they are missing from the options
    if sys.version_info >= (3, 14):
        raise HomeAssistantError(
            "Logitech Harmony Hub is not supported on Python 3.14. Please use Python 3.13."
        )
    _async_import_options_from_data_if_missing(hass, entry)

    address = entry.data[CONF_HOST]

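The comment retained above refers to _async_import_options_from_data_if_missing, which is not part of this hunk. A rough, hypothetical sketch of what such a fallback usually does, with illustrative keys and defaults only (not the integration's actual helper):

# Hypothetical, simplified fallback: if an option key is missing from the
# entry's options, seed it from the entry's data (or a default) without overwriting.
DEFAULT_OPTIONS = {"activity": None, "delay_secs": 0.4}


def import_options_from_data_if_missing(data: dict, options: dict) -> dict:
    """Return options, filling gaps from data (or defaults)."""
    merged = dict(options)
    for key, default in DEFAULT_OPTIONS.items():
        if key not in merged:
            merged[key] = data.get(key, default)
    return merged


print(import_options_from_data_if_missing({"delay_secs": 1.0}, {}))
# {'activity': None, 'delay_secs': 1.0}
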
@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/harmony",
  "iot_class": "local_push",
  "loggers": ["aioharmony", "slixmpp"],
  "requirements": ["aioharmony==0.5.3;python_version<'3.14'"],
  "requirements": ["aioharmony==0.5.3"],
  "ssdp": [
    {
      "manufacturer": "Logitech",

@@ -94,13 +94,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
    for device in devices:
        # Check if the device is still present in homee
        device_identifiers = {identifier[1] for identifier in device.identifiers}
        # homee itself uses just the uid, nodes use uid-nodeid
        is_homee_hub = homee.settings.uid in device_identifiers
        # homee itself uses just the uid, nodes use {uid}-{nodeid}
        if homee.settings.uid in device_identifiers:
            continue  # Hub itself is never removed.
        is_node_present = any(
            f"{homee.settings.uid}-{node.id}" in device_identifiers
            for node in homee.nodes
        )
        if not is_node_present and not is_homee_hub:
        if not is_node_present:
            _LOGGER.info("Removing device %s", device.name)
            device_registry.async_update_device(
                device_id=device.id,
@@ -110,16 +111,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeeConfigEntry) -> boo
    # Remove device at runtime when node is removed in homee
    async def _remove_node_callback(node: HomeeNode, add: bool) -> None:
        """Call when a node is removed."""
        if not add:
            device = device_registry.async_get_device(
                identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
        if add:
            return
        device = device_registry.async_get_device(
            identifiers={(DOMAIN, f"{entry.runtime_data.settings.uid}-{node.id}")}
        )
            if device:
                _LOGGER.info("Removing device %s", device.name)
                device_registry.async_update_device(
                    device_id=device.id,
                    remove_config_entry_id=entry.entry_id,
                )
        if device:
            _LOGGER.info("Removing device %s", device.name)
            device_registry.async_update_device(
                device_id=device.id,
                remove_config_entry_id=entry.entry_id,
            )

    homee.add_nodes_listener(_remove_node_callback)


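The cleanup above boils down to: never remove the hub's own registry entry, and remove any registered device whose node id no longer appears in the hub's node list. A standalone sketch of that logic with made-up identifiers:

# Standalone sketch of the stale-device cleanup shown above (hypothetical data shapes):
# skip the hub itself, then drop registry entries whose node no longer exists.
HUB_UID = "homee-01"
nodes = [1, 3]  # node ids still known to the hub

registered_devices = {
    "dev-a": {HUB_UID},           # the hub itself -> always kept
    "dev-b": {f"{HUB_UID}-1"},    # node 1 -> still present, kept
    "dev-c": {f"{HUB_UID}-2"},    # node 2 -> gone, removed
}

for device_id, identifiers in registered_devices.items():
    if HUB_UID in identifiers:
        continue  # hub itself is never removed
    is_node_present = any(f"{HUB_UID}-{node_id}" in identifiers for node_id in nodes)
    if not is_node_present:
        print(f"Removing stale device {device_id}")  # prints only dev-c
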
@@ -43,18 +43,22 @@ def async_setup_forwarded(
    some proxies, for example, Kubernetes NGINX ingress, only retain one element
    in the X-Forwarded-Proto header. In that case, we'll just use what we have.

    `X-Forwarded-Host: <host>`
    e.g., `X-Forwarded-Host: example.com`
    `X-Forwarded-Host: <host1>, <host2>, <host3>`
    e.g., `X-Forwarded-Host: example.com, proxy.example.com, backend.example.com`
    OR `X-Forwarded-Host: example.com` (one entry, even with multiple proxies)

    If the previous headers are processed successfully, and the X-Forwarded-Host is
    present, it will be used.
    present, the last one in the list will be used (set by the proxy nearest to the backend).

    Multiple headers are valid as stated in https://www.rfc-editor.org/rfc/rfc7239#section-7.1
    If multiple headers are present, they are handled according to
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/X-Forwarded-For#parsing
    > "split each X-Forwarded-For header by comma into lists and then join the lists."

    Additionally:
    - If no X-Forwarded-For header is found, the processing of all headers is skipped.
    - Throw HTTP 400 status when untrusted connected peer provides
      X-Forwarded-For headers.
    - If multiple instances of X-Forwarded-For, X-Forwarded-Proto or
      X-Forwarded-Host are found, an HTTP 400 status code is thrown.
    - If malformed or invalid (IP) data in X-Forwarded-For header is found,
      an HTTP 400 status code is thrown.
    - The connected client peer on the socket of the incoming connection,
@@ -111,15 +115,12 @@ def async_setup_forwarded(
            )
            raise HTTPBadRequest

        # Multiple X-Forwarded-For headers
        if len(forwarded_for_headers) > 1:
            _LOGGER.error(
                "Too many headers for X-Forwarded-For: %s", forwarded_for_headers
        # Process multiple X-Forwarded-For from the right side (by reversing the list)
        forwarded_for_split = list(
            reversed(
                [addr for header in forwarded_for_headers for addr in header.split(",")]
            )
            raise HTTPBadRequest

        # Process X-Forwarded-For from the right side (by reversing the list)
        forwarded_for_split = list(reversed(forwarded_for_headers[0].split(",")))
        )
        try:
            forwarded_for = [ip_address(addr.strip()) for addr in forwarded_for_split]
        except ValueError as err:
@@ -148,14 +149,15 @@ def async_setup_forwarded(
            X_FORWARDED_PROTO, []
        )
        if forwarded_proto_headers:
            if len(forwarded_proto_headers) > 1:
                _LOGGER.error(
                    "Too many headers for X-Forward-Proto: %s", forwarded_proto_headers
                )
                raise HTTPBadRequest

            # Process multiple X-Forwarded-Proto from the right side (by reversing the list)
            forwarded_proto_split = list(
                reversed(forwarded_proto_headers[0].split(","))
                reversed(
                    [
                        addr
                        for header in forwarded_proto_headers
                        for addr in header.split(",")
                    ]
                )
            )
            forwarded_proto = [proto.strip() for proto in forwarded_proto_split]

@@ -191,14 +193,16 @@ def async_setup_forwarded(
        # Handle X-Forwarded-Host
        forwarded_host_headers: list[str] = request.headers.getall(X_FORWARDED_HOST, [])
        if forwarded_host_headers:
            # Multiple X-Forwarded-Host headers
            if len(forwarded_host_headers) > 1:
                _LOGGER.error(
                    "Too many headers for X-Forwarded-Host: %s", forwarded_host_headers
            # Process multiple X-Forwarded-Host from the right side (by reversing the list)
            forwarded_host = list(
                reversed(
                    [
                        addr.strip()
                        for header in forwarded_host_headers
                        for addr in header.split(",")
                    ]
                )
                raise HTTPBadRequest

            forwarded_host = forwarded_host_headers[0].strip()
            )[0]
            if not forwarded_host:
                _LOGGER.error("Empty value received in X-Forward-Host header")
                raise HTTPBadRequest

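The changes above drop the one-header-only restriction and instead merge every X-Forwarded-* header: split each header on commas, join the lists, and walk them from the right, i.e. starting with the proxy nearest the backend. A standalone illustration of that flattening and ordering, with made-up addresses:

# Standalone sketch of the multi-header handling above.
from ipaddress import ip_address

forwarded_for_headers = [
    "203.0.113.10, 198.51.100.7",  # added by the outermost proxy
    "10.0.0.5",                    # added by an inner proxy
]

# Split every header on commas, join the lists, then reverse so the
# right-most (most trusted) entry comes first.
forwarded_for_split = list(
    reversed([addr for header in forwarded_for_headers for addr in header.split(",")])
)
forwarded_for = [ip_address(addr.strip()) for addr in forwarded_for_split]
print(forwarded_for)
# [IPv4Address('10.0.0.5'), IPv4Address('198.51.100.7'), IPv4Address('203.0.113.10')]
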
@@ -70,7 +70,7 @@ class ClearTrafficStatisticsButton(BaseButton):

    entity_description = ButtonEntityDescription(
        key=BUTTON_KEY_CLEAR_TRAFFIC_STATISTICS,
        name="Clear traffic statistics",
        translation_key="clear_traffic_statistics",
        entity_category=EntityCategory.CONFIG,
    )

@@ -87,7 +87,7 @@ class RestartButton(BaseButton):

    entity_description = ButtonEntityDescription(
        key=BUTTON_KEY_RESTART,
        name="Restart",
        translation_key="restart",
        device_class=ButtonDeviceClass.RESTART,
        entity_category=EntityCategory.CONFIG,
    )

@@ -41,7 +41,7 @@ rules:
  reauthentication-flow: done
  test-coverage:
    status: todo
    comment: Get percentage up there, add missing actual action press invocations in button tests' suspended state tests, rename test_switch.py to test_switch.py + make its functions receive hass as first parameter where applicable.
    comment: Get percentage up there, add missing actual action press invocations in button tests' suspended state tests.

  # Gold
  devices: done
@@ -61,9 +61,7 @@ rules:
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations:
    status: todo
    comment: Buttons and selects are lacking translations.
  entity-translations: done
  exception-translations: todo
  icon-translations:
    status: done

@@ -19,7 +19,6 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import UNDEFINED

from . import Router
from .const import DOMAIN, KEY_NET_NET_MODE
@@ -47,7 +46,6 @@ async def async_setup_entry(
    desc = HuaweiSelectEntityDescription(
        key=KEY_NET_NET_MODE,
        entity_category=EntityCategory.CONFIG,
        name="Preferred network mode",
        translation_key="preferred_network_mode",
        options=[
            NetworkModeEnum.MODE_AUTO.value,
@@ -95,11 +93,6 @@ class HuaweiLteSelectEntity(HuaweiLteBaseEntityWithDevice, SelectEntity):
        self.key = key
        self.item = item

        name = None
        if self.entity_description.name != UNDEFINED:
            name = self.entity_description.name
        self._attr_name = name or self.item

    def select_option(self, option: str) -> None:
        """Change the selected option."""
        self.entity_description.setter_fn(option)

@@ -86,6 +86,14 @@
        "name": "SMS storage full"
      }
    },
    "button": {
      "clear_traffic_statistics": {
        "name": "Clear traffic statistics"
      },
      "restart": {
        "name": "[%key:common::action::restart%]"
      }
    },
    "sensor": {
      "uptime": {
        "name": "Uptime"

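The button and select hunks above replace hard-coded name= values with translation_key entries that resolve against the strings.json additions shown here. A simplified standalone sketch of that pairing (the actual resolution is done by Home Assistant itself, and the restart entry really points at a common key rather than a literal string):

import json

strings = json.loads("""
{
  "entity": {
    "button": {
      "clear_traffic_statistics": {"name": "Clear traffic statistics"},
      "restart": {"name": "Restart"}
    }
  }
}
""")


def translated_name(platform: str, translation_key: str) -> str:
    """Resolve an entity name from the (simplified) strings.json structure."""
    return strings["entity"][platform][translation_key]["name"]


print(translated_name("button", "clear_traffic_statistics"))  # Clear traffic statistics
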
@@ -188,7 +188,7 @@ class HumidifierEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_AT
    @property
    def state_attributes(self) -> dict[str, Any]:
        """Return the optional state attributes."""
        data: dict[str, Any] = self.generate_entity_state_attributes()
        data: dict[str, Any] = {}

        if self.action is not None:
            data[ATTR_ACTION] = self.action if self.is_on else HumidifierAction.OFF

@@ -112,7 +112,7 @@ class AutomowerButtonEntity(AutomowerControlEntity, ButtonEntity):
            self.mower_attributes
        )

    @handle_sending_exception()
    @handle_sending_exception
    async def async_press(self) -> None:
        """Send a command to the mower."""
        await self.entity_description.press_fn(self.coordinator.api, self.mower_id)

@@ -182,14 +182,6 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
                "Failed to listen to websocket. Trying to reconnect: %s",
                err,
            )
        if not hass.is_stopping:
            await asyncio.sleep(self.reconnect_time)
            self.reconnect_time = min(self.reconnect_time * 2, MAX_WS_RECONNECT_TIME)
            entry.async_create_background_task(
                hass,
                self.client_listen(hass, entry, automower_client),
                "reconnect_task",
            )

    def _should_poll(self) -> bool:
        """Return True if at least one mower is connected and at least one is not OFF."""

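The removed block above implemented a capped exponential backoff before each websocket reconnect attempt. A standalone sketch of that arithmetic with illustrative values, not the integration's actual constants:

# Double the delay after each failed reconnect, but never exceed the cap.
INITIAL_RECONNECT_TIME = 2   # seconds (illustrative)
MAX_WS_RECONNECT_TIME = 600  # seconds (illustrative)

reconnect_time = INITIAL_RECONNECT_TIME
delays = []
for _attempt in range(10):
    delays.append(reconnect_time)
    reconnect_time = min(reconnect_time * 2, MAX_WS_RECONNECT_TIME)

print(delays)  # [2, 4, 8, 16, 32, 64, 128, 256, 512, 600]
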
@@ -6,7 +6,7 @@ import asyncio
from collections.abc import Callable, Coroutine
import functools
import logging
from typing import TYPE_CHECKING, Any, Concatenate
from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar, overload

from aioautomower.exceptions import ApiError
from aioautomower.model import MowerActivities, MowerAttributes, MowerStates, WorkArea
@@ -37,23 +37,42 @@ ERROR_STATES = [
]


@callback
def _work_area_translation_key(work_area_id: int, key: str) -> str:
    """Return the translation key."""
    if work_area_id == 0:
        return f"my_lawn_{key}"
    return f"work_area_{key}"
_Entity = TypeVar("_Entity", bound="AutomowerBaseEntity")
_P = ParamSpec("_P")


type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]
@overload
def handle_sending_exception(
    _func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]: ...


def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P](
@overload
def handle_sending_exception(
    *,
    poll_after_sending: bool = False,
) -> Callable[[_FuncType[_Entity, _P, Any]], _FuncType[_Entity, _P, None]]:
) -> Callable[
    [Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
    Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
]: ...


def handle_sending_exception(
    _func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]] | None = None,
    *,
    poll_after_sending: bool = False,
) -> (
    Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]
    | Callable[
        [Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]]],
        Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]],
    ]
):
    """Handle exceptions while sending a command and optionally refresh coordinator."""

    def decorator(func: _FuncType[_Entity, _P, Any]) -> _FuncType[_Entity, _P, None]:
    def decorator(
        func: Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, Any]],
    ) -> Callable[Concatenate[_Entity, _P], Coroutine[Any, Any, None]]:
        @functools.wraps(func)
        async def wrapper(self: _Entity, *args: _P.args, **kwargs: _P.kwargs) -> None:
            try:
@@ -73,7 +92,20 @@ def handle_sending_exception[_Entity: AutomowerBaseEntity, **_P](

        return wrapper

    return decorator
    if _func is None:
        # call with brackets: @handle_sending_exception(...)
        return decorator

    # call without brackets: @handle_sending_exception
    return decorator(_func)


@callback
def _work_area_translation_key(work_area_id: int, key: str) -> str:
    """Return the translation key."""
    if work_area_id == 0:
        return f"my_lawn_{key}"
    return f"work_area_{key}"


class AutomowerBaseEntity(CoordinatorEntity[AutomowerDataUpdateCoordinator]):

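The overloads above make handle_sending_exception usable both bare (@handle_sending_exception) and parameterized (@handle_sending_exception(poll_after_sending=True)). A standalone, synchronous sketch of that dual-use decorator pattern with hypothetical names:

from __future__ import annotations

import functools
from collections.abc import Callable


def handle_errors(_func: Callable | None = None, *, retries: int = 0):
    """Wrap a function, optionally retrying; usable with or without parentheses."""

    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(retries + 1):
                try:
                    return func(*args, **kwargs)
                except ValueError:
                    if attempt == retries:
                        return None
            return None

        return wrapper

    if _func is None:
        # called with brackets: @handle_errors(...)
        return decorator
    # called without brackets: @handle_errors
    return decorator(_func)


@handle_errors
def parse(value: str) -> int:
    return int(value)


@handle_errors(retries=1)
def parse_strict(value: str) -> int:
    return int(value)


print(parse("41"), parse("oops"))  # 41 None
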
@@ -135,22 +135,22 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
        """Return the work areas of the mower."""
        return self.mower_attributes.work_areas

    @handle_sending_exception()
    @handle_sending_exception
    async def async_start_mowing(self) -> None:
        """Resume schedule."""
        await self.coordinator.api.commands.resume_schedule(self.mower_id)

    @handle_sending_exception()
    @handle_sending_exception
    async def async_pause(self) -> None:
        """Pauses the mower."""
        await self.coordinator.api.commands.pause_mowing(self.mower_id)

    @handle_sending_exception()
    @handle_sending_exception
    async def async_dock(self) -> None:
        """Parks the mower until next schedule."""
        await self.coordinator.api.commands.park_until_next_schedule(self.mower_id)

    @handle_sending_exception()
    @handle_sending_exception
    async def async_override_schedule(
        self, override_mode: str, duration: timedelta
    ) -> None:
@@ -160,7 +160,7 @@ class AutomowerLawnMowerEntity(AutomowerBaseEntity, LawnMowerEntity):
        if override_mode == PARK:
            await self.coordinator.api.commands.park_for(self.mower_id, duration)

    @handle_sending_exception()
    @handle_sending_exception
    async def async_override_schedule_work_area(
        self, work_area_id: int, duration: timedelta
    ) -> None:

Some files were not shown because too many files have changed in this diff.