Mirror of https://github.com/home-assistant/core.git (synced 2025-04-24 01:08:12 +00:00)

Commit 66c3981ff5: Merge branch 'zha_3phase_remaining_meetering' into zha_3ph_energy
.devcontainer/devcontainer.json

@@ -62,7 +62,7 @@
   "json.schemas": [
     {
       "fileMatch": ["homeassistant/components/*/manifest.json"],
-      "url": "./script/json_schemas/manifest_schema.json"
+      "url": "${containerWorkspaceFolder}/script/json_schemas/manifest_schema.json"
     }
   ]
 }
.gitattributes (vendored, 11 changes)

@@ -11,3 +11,14 @@
 *.pcm binary
 
 Dockerfile.dev linguist-language=Dockerfile
+
+# Generated files
+CODEOWNERS linguist-generated=true
+Dockerfile linguist-generated=true
+homeassistant/generated/*.py linguist-generated=true
+mypy.ini linguist-generated=true
+requirements.txt linguist-generated=true
+requirements_all.txt linguist-generated=true
+requirements_test_all.txt linguist-generated=true
+requirements_test_pre_commit.txt linguist-generated=true
+script/hassfest/docker/Dockerfile linguist-generated=true
.github/workflows/builder.yml (vendored, 12 changes)

@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: translations
          path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v7
+        uses: dawidd6/action-download-artifact@v8
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v7
+        uses: dawidd6/action-download-artifact@v8
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/intents-package
@@ -509,7 +509,7 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Docker image
-        uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
+        uses: docker/build-push-action@67a2d409c0a876cbe6b11854e3e25193efe4e62d # v6.12.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
      - name: Push Docker image
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
        id: push
-        uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
+        uses: docker/build-push-action@67a2d409c0a876cbe6b11854e3e25193efe4e62d # v6.12.0
        with:
          context: . # So action will not pull the repository again
          file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

      - name: Generate artifact attestation
        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
        with:
          subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 30 changes)

@@ -41,8 +41,8 @@ env:
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 9
  HA_SHORT_VERSION: "2025.2"
-  DEFAULT_PYTHON: "3.12"
-  ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
+  DEFAULT_PYTHON: "3.13"
+  ALL_PYTHON_VERSIONS: "['3.13']"
  # 10.3 is the oldest supported version
  # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
  # 10.6 is the current long-term-support
@@ -537,7 +537,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt
@@ -661,7 +661,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
      - name: Upload licenses
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json
@@ -877,7 +877,7 @@ jobs:
          . venv/bin/activate
          python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
      - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pytest_buckets
          path: pytest_buckets.txt
@@ -979,14 +979,14 @@ jobs:
            2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1106,7 +1106,7 @@ jobs:
            2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1114,7 +1114,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.mariadb }}
@@ -1236,7 +1236,7 @@ jobs:
            2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1244,7 +1244,7 @@ jobs:
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: coverage-${{ matrix.python-version }}-${{
            steps.pytest-partial.outputs.postgresql }}
@@ -1273,7 +1273,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.1.2
+        uses: codecov/codecov-action@v5.3.0
        with:
          fail_ci_if_error: true
          flags: full-suite
@@ -1378,14 +1378,14 @@ jobs:
            2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
      - name: Upload pytest output
        if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
          path: pytest-*.txt
          overwrite: true
      - name: Upload coverage artifact
        if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
          path: coverage.xml
@@ -1411,7 +1411,7 @@ jobs:
          pattern: coverage-*
      - name: Upload coverage to Codecov
        if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.1.2
+        uses: codecov/codecov-action@v5.3.0
        with:
          fail_ci_if_error: true
          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@v4.2.2

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.0
+        uses: github/codeql-action/init@v3.28.4
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.0
+        uses: github/codeql-action/analyze@v3.28.4
        with:
          category: "/language:python"
.github/workflows/stale.yml (vendored, 6 changes)

@@ -17,7 +17,7 @@ jobs:
      #  - No PRs marked as no-stale
      #  - No issues (-1)
      - name: 60 days stale PRs policy
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
      #  - No issues marked as no-stale or help-wanted
      #  - No PRs (-1)
      - name: 90 days stale issues
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
      #  - No Issues marked as no-stale or help-wanted
      #  - No PRs (-1)
      - name: Needs more information stale issues policy
-        uses: actions/stale@v9.0.0
+        uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ steps.token.outputs.token }}
          only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ on:
      - "**strings.json"

env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"

jobs:
  upload:
.github/workflows/wheels.yml (vendored, 45 changes)

@@ -17,7 +17,7 @@ on:
      - "script/gen_requirements_all.py"

env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name}}
@@ -76,18 +76,37 @@ jobs:

            # Use C-Extension for SQLAlchemy
            echo "REQUIRE_SQLALCHEMY_CEXT=1"
+
+            # Add additional pip wheel build constraints
+            echo "PIP_CONSTRAINT=build_constraints.txt"
          ) > .env_file

+      - name: Write pip wheel build constraints
+        run: |
+          (
+            # ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
+            # this caused the numpy builds to fail
+            # https://github.com/scikit-build/ninja-python-distributions/issues/274
+            echo "ninja==1.11.1.1"
+          ) > build_constraints.txt
+
      - name: Upload env_file
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: env_file
          path: ./.env_file
          include-hidden-files: true
          overwrite: true

+      - name: Upload build_constraints
+        uses: actions/upload-artifact@v4.6.0
+        with:
+          name: build_constraints
+          path: ./build_constraints.txt
+          overwrite: true
+
      - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: requirements_diff
          path: ./requirements_diff.txt
@@ -99,7 +118,7 @@ jobs:
          python -m script.gen_requirements_all ci

      - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@v4.6.0
        with:
          name: requirements_all_wheels
          path: ./requirements_all_wheels_*.txt
@@ -123,6 +142,11 @@ jobs:
        with:
          name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: build_constraints
+
      - name: Download requirements_diff
        uses: actions/download-artifact@v4.1.8
        with:
@@ -142,7 +166,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -167,6 +191,11 @@ jobs:
        with:
          name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: build_constraints
+
      - name: Download requirements_diff
        uses: actions/download-artifact@v4.1.8
        with:
@@ -205,7 +234,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -219,7 +248,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
@@ -233,7 +262,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.3
+    rev: v0.9.1
    hooks:
      - id: ruff
        args:
@@ -61,13 +61,14 @@ repos:
        name: mypy
        entry: script/run-in-env.sh mypy
        language: script
-        types_or: [python, pyi]
        require_serial: true
+        types_or: [python, pyi]
        files: ^(homeassistant|pylint)/.+\.(py|pyi)$
      - id: pylint
        name: pylint
-        entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
+        entry: script/run-in-env.sh pylint --ignore-missing-annotations=y
        language: script
+        require_serial: true
        types_or: [python, pyi]
        files: ^(homeassistant|tests)/.+\.(py|pyi)$
      - id: gen_requirements_all
.strict-typing

@@ -224,6 +224,7 @@ homeassistant.components.gpsd.*
 homeassistant.components.greeneye_monitor.*
 homeassistant.components.group.*
 homeassistant.components.guardian.*
+homeassistant.components.habitica.*
 homeassistant.components.hardkernel.*
 homeassistant.components.hardware.*
 homeassistant.components.here_travel_time.*
@@ -236,6 +237,7 @@ homeassistant.components.homeassistant_green.*
 homeassistant.components.homeassistant_hardware.*
 homeassistant.components.homeassistant_sky_connect.*
 homeassistant.components.homeassistant_yellow.*
+homeassistant.components.homee.*
 homeassistant.components.homekit.*
 homeassistant.components.homekit_controller
 homeassistant.components.homekit_controller.alarm_control_panel
@@ -261,6 +263,7 @@ homeassistant.components.image_processing.*
 homeassistant.components.image_upload.*
 homeassistant.components.imap.*
 homeassistant.components.imgw_pib.*
+homeassistant.components.incomfort.*
 homeassistant.components.input_button.*
 homeassistant.components.input_select.*
 homeassistant.components.input_text.*
@@ -291,6 +294,7 @@ homeassistant.components.lcn.*
 homeassistant.components.ld2410_ble.*
 homeassistant.components.led_ble.*
 homeassistant.components.lektrico.*
+homeassistant.components.letpot.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
@@ -305,12 +309,14 @@ homeassistant.components.logbook.*
 homeassistant.components.logger.*
 homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
+homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
+homeassistant.components.mcp_server.*
 homeassistant.components.mealie.*
 homeassistant.components.media_extractor.*
 homeassistant.components.media_player.*
@@ -362,11 +368,14 @@ homeassistant.components.openuv.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
+homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
 homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
+homeassistant.components.peblar.*
 homeassistant.components.peco.*
 homeassistant.components.persistent_notification.*
+homeassistant.components.person.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
@@ -380,6 +389,8 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
+homeassistant.components.python_script.*
+homeassistant.components.qbus.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
.vscode/settings.default.json (vendored, 3 changes)

@@ -1,5 +1,5 @@
 {
-  // Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json
+  // Please keep this file (mostly!) in sync with settings in home-assistant/.devcontainer/devcontainer.json
   // Added --no-cov to work around TypeError: message must be set
   // https://github.com/microsoft/vscode-python/issues/14067
   "python.testing.pytestArgs": ["--no-cov"],
@@ -12,6 +12,7 @@
       "fileMatch": [
         "homeassistant/components/*/manifest.json"
       ],
+      // This value differs between working with devcontainer and locally, therefor this value should NOT be in sync!
       "url": "./script/json_schemas/manifest_schema.json"
     }
   ]
CODEOWNERS (generated, 44 changes)

@@ -637,6 +637,8 @@ build.json @home-assistant/supervisor
 /tests/components/homeassistant_sky_connect/ @home-assistant/core
 /homeassistant/components/homeassistant_yellow/ @home-assistant/core
 /tests/components/homeassistant_yellow/ @home-assistant/core
+/homeassistant/components/homee/ @Taraman17
+/tests/components/homee/ @Taraman17
 /homeassistant/components/homekit/ @bdraco
 /tests/components/homekit/ @bdraco
 /homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -680,12 +682,12 @@ build.json @home-assistant/supervisor
 /homeassistant/components/iammeter/ @lewei50
 /homeassistant/components/iaqualink/ @flz
 /tests/components/iaqualink/ @flz
-/homeassistant/components/ibeacon/ @bdraco
-/tests/components/ibeacon/ @bdraco
 /homeassistant/components/icloud/ @Quentame @nzapponi
 /tests/components/icloud/ @Quentame @nzapponi
 /homeassistant/components/idasen_desk/ @abmantis
 /tests/components/idasen_desk/ @abmantis
+/homeassistant/components/igloohome/ @keithle888
+/tests/components/igloohome/ @keithle888
 /homeassistant/components/ign_sismologia/ @exxamalte
 /tests/components/ign_sismologia/ @exxamalte
 /homeassistant/components/image/ @home-assistant/core
@@ -827,6 +829,8 @@ build.json @home-assistant/supervisor
 /tests/components/led_ble/ @bdraco
 /homeassistant/components/lektrico/ @lektrico
 /tests/components/lektrico/ @lektrico
+/homeassistant/components/letpot/ @jpelgrom
+/tests/components/letpot/ @jpelgrom
 /homeassistant/components/lg_netcast/ @Drafteed @splinter98
 /tests/components/lg_netcast/ @Drafteed @splinter98
 /homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
@@ -887,6 +891,8 @@ build.json @home-assistant/supervisor
 /tests/components/matrix/ @PaarthShah
 /homeassistant/components/matter/ @home-assistant/matter
 /tests/components/matter/ @home-assistant/matter
+/homeassistant/components/mcp_server/ @allenporter
+/tests/components/mcp_server/ @allenporter
 /homeassistant/components/mealie/ @joostlek @andrew-codechimp
 /tests/components/mealie/ @joostlek @andrew-codechimp
 /homeassistant/components/meater/ @Sotolotl @emontnemery
@@ -1016,7 +1022,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/nina/ @DeerMaximum
 /tests/components/nina/ @DeerMaximum
 /homeassistant/components/nissan_leaf/ @filcole
-/homeassistant/components/nmbs/ @thibmaek
 /homeassistant/components/noaa_tides/ @jdelaney72
 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
 /tests/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1068,8 +1073,8 @@ build.json @home-assistant/supervisor
 /tests/components/onewire/ @garbled1 @epenet
 /homeassistant/components/onkyo/ @arturpragacz @eclair4151
 /tests/components/onkyo/ @arturpragacz @eclair4151
-/homeassistant/components/onvif/ @hunterjm
-/tests/components/onvif/ @hunterjm
+/homeassistant/components/onvif/ @hunterjm @jterrace
+/tests/components/onvif/ @hunterjm @jterrace
 /homeassistant/components/open_meteo/ @frenck
 /tests/components/open_meteo/ @frenck
 /homeassistant/components/openai_conversation/ @balloob
@@ -1105,6 +1110,8 @@ build.json @home-assistant/supervisor
 /tests/components/ourgroceries/ @OnFreund
 /homeassistant/components/overkiz/ @imicknl
 /tests/components/overkiz/ @imicknl
+/homeassistant/components/overseerr/ @joostlek
+/tests/components/overseerr/ @joostlek
 /homeassistant/components/ovo_energy/ @timmo001
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1182,6 +1189,8 @@ build.json @home-assistant/supervisor
 /tests/components/pyload/ @tr4nt0r
 /homeassistant/components/qbittorrent/ @geoffreylagaisse @finder39
 /tests/components/qbittorrent/ @geoffreylagaisse @finder39
+/homeassistant/components/qbus/ @Qbus-iot @thomasddn
+/tests/components/qbus/ @Qbus-iot @thomasddn
 /homeassistant/components/qingping/ @bdraco
 /tests/components/qingping/ @bdraco
 /homeassistant/components/qld_bushfire/ @exxamalte
@@ -1258,8 +1267,8 @@ build.json @home-assistant/supervisor
 /tests/components/rituals_perfume_genie/ @milanmeu @frenck
 /homeassistant/components/rmvtransport/ @cgtobi
 /tests/components/rmvtransport/ @cgtobi
-/homeassistant/components/roborock/ @Lash-L
-/tests/components/roborock/ @Lash-L
+/homeassistant/components/roborock/ @Lash-L @allenporter
+/tests/components/roborock/ @Lash-L @allenporter
 /homeassistant/components/roku/ @ctalkington
 /tests/components/roku/ @ctalkington
 /homeassistant/components/romy/ @xeniter
@@ -1278,6 +1287,7 @@ build.json @home-assistant/supervisor
 /tests/components/ruckus_unleashed/ @lanrat @ms264556 @gabe565
 /homeassistant/components/russound_rio/ @noahhusby
 /tests/components/russound_rio/ @noahhusby
+/homeassistant/components/russound_rnet/ @noahhusby
 /homeassistant/components/ruuvi_gateway/ @akx
 /tests/components/ruuvi_gateway/ @akx
 /homeassistant/components/ruuvitag_ble/ @akx
@@ -1371,8 +1381,8 @@ build.json @home-assistant/supervisor
 /tests/components/slide_local/ @dontinelli
 /homeassistant/components/slimproto/ @marcelveldt
 /tests/components/slimproto/ @marcelveldt
-/homeassistant/components/sma/ @kellerza @rklomp
-/tests/components/sma/ @kellerza @rklomp
+/homeassistant/components/sma/ @kellerza @rklomp @erwindouna
+/tests/components/sma/ @kellerza @rklomp @erwindouna
 /homeassistant/components/smappee/ @bsmappee
 /tests/components/smappee/ @bsmappee
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
@@ -1398,8 +1408,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/solaredge_local/ @drobtravels @scheric
 /homeassistant/components/solarlog/ @Ernst79 @dontinelli
 /tests/components/solarlog/ @Ernst79 @dontinelli
-/homeassistant/components/solax/ @squishykid
-/tests/components/solax/ @squishykid
+/homeassistant/components/solax/ @squishykid @Darsstar
+/tests/components/solax/ @squishykid @Darsstar
 /homeassistant/components/soma/ @ratsept @sebfortier2288
 /tests/components/soma/ @ratsept @sebfortier2288
 /homeassistant/components/sonarr/ @ctalkington
@@ -1478,8 +1488,8 @@ build.json @home-assistant/supervisor
 /tests/components/system_bridge/ @timmo001
 /homeassistant/components/systemmonitor/ @gjohansson-ST
 /tests/components/systemmonitor/ @gjohansson-ST
-/homeassistant/components/tado/ @chiefdragon @erwindouna
-/tests/components/tado/ @chiefdragon @erwindouna
+/homeassistant/components/tado/ @erwindouna
+/tests/components/tado/ @erwindouna
 /homeassistant/components/tag/ @balloob @dmulcahey
 /tests/components/tag/ @balloob @dmulcahey
 /homeassistant/components/tailscale/ @frenck
@@ -1618,15 +1628,15 @@ build.json @home-assistant/supervisor
 /tests/components/valve/ @home-assistant/core
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
-/homeassistant/components/velux/ @Julius2342 @DeerMaximum
-/tests/components/velux/ @Julius2342 @DeerMaximum
+/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
+/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
 /homeassistant/components/venstar/ @garbled1 @jhollowe
 /tests/components/venstar/ @garbled1 @jhollowe
 /homeassistant/components/versasense/ @imstevenxyz
 /homeassistant/components/version/ @ludeeus
 /tests/components/version/ @ludeeus
-/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
-/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja
+/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
+/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
 /homeassistant/components/vicare/ @CFenner
 /tests/components/vicare/ @CFenner
 /homeassistant/components/vilfo/ @ManneW
Dockerfile (generated, 4 changes)

@@ -13,7 +13,7 @@ ENV \
 ARG QEMU_CPU
 
 # Install uv
-RUN pip3 install uv==0.5.8
+RUN pip3 install uv==0.5.21
 
 WORKDIR /usr/src
 
@@ -55,7 +55,7 @@ RUN \
         "armv7") go2rtc_suffix='arm' ;; \
         *) go2rtc_suffix=${BUILD_ARCH} ;; \
     esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
+    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
    && chmod +x /bin/go2rtc \
    # Verify go2rtc can be executed
    && go2rtc --version
homeassistant/auth/auth_store.py

@@ -308,7 +308,7 @@ class AuthStore:
         credentials.data = data
         self._async_schedule_save()
 
-    async def async_load(self) -> None:  # noqa: C901
+    async def async_load(self) -> None:
         """Load the users."""
         if self._loaded:
             raise RuntimeError("Auth storage is already loaded")
homeassistant/auth/mfa_modules/__init__.py

@@ -4,9 +4,8 @@ from __future__ import annotations
 
 import logging
 import types
-from typing import Any, Generic
+from typing import Any
 
-from typing_extensions import TypeVar
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
@@ -35,12 +34,6 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")
 
 _LOGGER = logging.getLogger(__name__)
 
-_MultiFactorAuthModuleT = TypeVar(
-    "_MultiFactorAuthModuleT",
-    bound="MultiFactorAuthModule",
-    default="MultiFactorAuthModule",
-)
-
 
 class MultiFactorAuthModule:
     """Multi-factor Auth Module of validation function."""
@@ -102,7 +95,9 @@ class MultiFactorAuthModule:
         raise NotImplementedError
 
 
-class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
+class SetupFlow[_MultiFactorAuthModuleT: MultiFactorAuthModule = MultiFactorAuthModule](
+    data_entry_flow.FlowHandler
+):
     """Handler for the setup flow."""
 
     def __init__(
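The SetupFlow hunk above (and the matching LoginFlow hunk in homeassistant/auth/providers/__init__.py further down) replaces a typing_extensions.TypeVar carrying bound= and default= with Python's native type-parameter syntax (PEP 695, with PEP 696 defaults), which requires Python 3.13, the new DEFAULT_PYTHON in this commit. A minimal, self-contained sketch of the same pattern; the class names here are illustrative stand-ins, not Home Assistant code:

    # Requires Python 3.13: PEP 695 type-parameter syntax with a PEP 696 default.


    class Module:
        """Stand-in for a base class such as MultiFactorAuthModule."""


    class TotpModule(Module):
        """Stand-in for a concrete module subclass."""


    # Old spelling:
    #   _ModuleT = TypeVar("_ModuleT", bound=Module, default=Module)
    #   class Flow(FlowHandler, Generic[_ModuleT]): ...
    # New spelling: the bound and the default are declared inline in the header.
    class Flow[_ModuleT: Module = Module]:
        def __init__(self, module: _ModuleT) -> None:
            self.module = module


    flow = Flow(TotpModule())  # _ModuleT is inferred as TotpModule
    fallback: Flow = Flow(Module())  # an unparameterized Flow falls back to Module

Dropping the module-level TypeVar also removes the typing_extensions import, which is why both import hunks shrink.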
homeassistant/auth/models.py

@@ -11,7 +11,7 @@ import uuid
 import attr
 from attr import Attribute
 from attr.setters import validate
-from propcache import cached_property
+from propcache.api import cached_property
 
 from homeassistant.const import __version__
 from homeassistant.data_entry_flow import FlowContext, FlowResult
homeassistant/auth/permissions/__init__.py

@@ -17,12 +17,12 @@ POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
 
 __all__ = [
     "POLICY_SCHEMA",
-    "merge_policies",
-    "PermissionLookup",
-    "PolicyType",
     "AbstractPermissions",
-    "PolicyPermissions",
     "OwnerPermissions",
+    "PermissionLookup",
+    "PolicyPermissions",
+    "PolicyType",
+    "merge_policies",
 ]
 
 
homeassistant/auth/providers/__init__.py

@@ -5,9 +5,8 @@ from __future__ import annotations
 from collections.abc import Mapping
 import logging
 import types
-from typing import Any, Generic
+from typing import Any
 
-from typing_extensions import TypeVar
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
@@ -47,8 +46,6 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
     extra=vol.ALLOW_EXTRA,
 )
 
-_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")
-
 
 class AuthProvider:
     """Provider of user authentication."""
@@ -195,9 +192,8 @@ async def load_auth_provider_module(
     return module
 
 
-class LoginFlow(
+class LoginFlow[_AuthProviderT: AuthProvider = AuthProvider](
     FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
-    Generic[_AuthProviderT],
 ):
     """Handler for the login flow."""
 
homeassistant/backup_restore.py

@@ -119,7 +119,7 @@ def _extract_backup(
             Path(
                 tempdir,
                 "extracted",
-                f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
+                f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}",
             ),
             gzip=backup_meta["compressed"],
             key=password_to_key(restore_content.password)
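A note on the one-character quoting fix above: before Python 3.12 (PEP 701), reusing the enclosing quote character inside an f-string expression was a syntax error, and Ruff's formatter still normalizes nested quotes; the advantage_air hunks further down apply the same normalization in the opposite direction. A small stand-alone illustration (the dict is made-up stand-in data, not the real backup metadata):

    backup_meta = {"compressed": True}  # stand-in data

    # Rejected before Python 3.12: double quotes nested in a double-quoted f-string.
    # name = f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}"

    # Portable spelling used by the fix: single quotes inside, double quotes outside.
    name = f"homeassistant.tar{'.gz' if backup_meta['compressed'] else ''}"
    print(name)  # prints: homeassistant.tar.gz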
homeassistant/block_async_io.py

@@ -31,7 +31,7 @@ def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
 def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
     # If the file is in /proc we can ignore it.
     args = mapped_args["args"]
-    path = args[0] if type(args[0]) is str else str(args[0])  # noqa: E721
+    path = args[0] if type(args[0]) is str else str(args[0])
     return path.startswith(ALLOWED_FILE_PREFIXES)
 
 
homeassistant/bootstrap.py

@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info, is_official_image
+from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -106,11 +106,17 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
+from .util.system_info import is_official_image
 
+with contextlib.suppress(ImportError):
+    # Ensure anyio backend is imported to avoid it being imported in the event loop
+    from anyio._backends import _asyncio  # noqa: F401
+
 with contextlib.suppress(ImportError):
     # httpx will import trio if it is installed which does
     # blocking I/O in the event loop. We want to avoid that.
     import trio  # noqa: F401
 
 
 if TYPE_CHECKING:
     from .runner import RuntimeConfig
homeassistant/brands/microsoft.json

@@ -2,6 +2,7 @@
   "domain": "microsoft",
   "name": "Microsoft",
   "integrations": [
+    "azure_data_explorer",
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
homeassistant/components/abode/strings.json

@@ -34,17 +34,17 @@
   "services": {
     "capture_image": {
       "name": "Capture image",
-      "description": "Request a new image capture from a camera device.",
+      "description": "Requests a new image capture from a camera device.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the camera to request an image."
+          "description": "Entity ID of the camera to request an image from."
         }
       }
     },
     "change_setting": {
       "name": "Change setting",
-      "description": "Change an Abode system setting.",
+      "description": "Changes an Abode system setting.",
       "fields": {
         "setting": {
           "name": "Setting",
@@ -58,11 +58,11 @@
     },
     "trigger_automation": {
       "name": "Trigger automation",
-      "description": "Trigger an Abode automation.",
+      "description": "Triggers an Abode automation.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the automation to trigger."
+          "description": "Entity ID of the automation to trigger."
         }
       }
     }
homeassistant/components/acaia/manifest.json

@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.11"]
+  "requirements": ["aioacaia==0.1.14"]
 }
homeassistant/components/acmeda/hub.py

@@ -70,7 +70,7 @@ class PulseHub:
 
     async def async_notify_update(self, update_type: aiopulse.UpdateType) -> None:
         """Evaluate entities when hub reports that update has occurred."""
-        LOGGER.debug("Hub {update_type.name} updated")
+        LOGGER.debug("Hub %s updated", update_type.name)
 
         if update_type == aiopulse.UpdateType.rollers:
             await update_devices(self.hass, self.config_entry, self.api.rollers)
homeassistant/components/actiontec/device_tracker.py

@@ -3,9 +3,9 @@
 from __future__ import annotations
 
 import logging
-import telnetlib  # pylint: disable=deprecated-module
 from typing import Final
 
+import telnetlib  # pylint: disable=deprecated-module
 import voluptuous as vol
 
 from homeassistant.components.device_tracker import (
homeassistant/components/adguard/__init__.py

@@ -34,9 +34,12 @@ from .const import (
     SERVICE_REMOVE_URL,
 )
 
-SERVICE_URL_SCHEMA = vol.Schema({vol.Required(CONF_URL): cv.url})
+SERVICE_URL_SCHEMA = vol.Schema({vol.Required(CONF_URL): vol.Any(cv.url, cv.path)})
 SERVICE_ADD_URL_SCHEMA = vol.Schema(
-    {vol.Required(CONF_NAME): cv.string, vol.Required(CONF_URL): cv.url}
+    {
+        vol.Required(CONF_NAME): cv.string,
+        vol.Required(CONF_URL): vol.Any(cv.url, cv.path),
+    }
 )
 SERVICE_REFRESH_SCHEMA = vol.Schema(
     {vol.Optional(CONF_FORCE, default=False): cv.boolean}
homeassistant/components/advantage_air/binary_sensor.py

@@ -66,7 +66,7 @@ class AdvantageAirZoneMotion(AdvantageAirZoneEntity, BinarySensorEntity):
     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an Advantage Air Zone Motion sensor."""
         super().__init__(instance, ac_key, zone_key)
-        self._attr_name = f'{self._zone["name"]} motion'
+        self._attr_name = f"{self._zone['name']} motion"
         self._attr_unique_id += "-motion"
 
     @property
@@ -84,7 +84,7 @@ class AdvantageAirZoneMyZone(AdvantageAirZoneEntity, BinarySensorEntity):
     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an Advantage Air Zone MyZone sensor."""
         super().__init__(instance, ac_key, zone_key)
-        self._attr_name = f'{self._zone["name"]} myZone'
+        self._attr_name = f"{self._zone['name']} myZone"
         self._attr_unique_id += "-myzone"
 
     @property
homeassistant/components/advantage_air/sensor.py

@@ -103,7 +103,7 @@ class AdvantageAirZoneVent(AdvantageAirZoneEntity, SensorEntity):
     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an Advantage Air Zone Vent Sensor."""
         super().__init__(instance, ac_key, zone_key=zone_key)
-        self._attr_name = f'{self._zone["name"]} vent'
+        self._attr_name = f"{self._zone['name']} vent"
         self._attr_unique_id += "-vent"
 
     @property
@@ -131,7 +131,7 @@ class AdvantageAirZoneSignal(AdvantageAirZoneEntity, SensorEntity):
     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an Advantage Air Zone wireless signal sensor."""
         super().__init__(instance, ac_key, zone_key)
-        self._attr_name = f'{self._zone["name"]} signal'
+        self._attr_name = f"{self._zone['name']} signal"
         self._attr_unique_id += "-signal"
 
     @property
@@ -165,7 +165,7 @@ class AdvantageAirZoneTemp(AdvantageAirZoneEntity, SensorEntity):
     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an Advantage Air Zone Temp Sensor."""
         super().__init__(instance, ac_key, zone_key)
-        self._attr_name = f'{self._zone["name"]} temperature'
+        self._attr_name = f"{self._zone['name']} temperature"
         self._attr_unique_id += "-temp"
 
     @property
homeassistant/components/airgradient/button.py

@@ -18,7 +18,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from . import AirGradientConfigEntry
 from .const import DOMAIN
 from .coordinator import AirGradientCoordinator
-from .entity import AirGradientEntity
+from .entity import AirGradientEntity, exception_handler
+
+PARALLEL_UPDATES = 1
 
 
 @dataclass(frozen=True, kw_only=True)
@@ -100,6 +102,7 @@ class AirGradientButton(AirGradientEntity, ButtonEntity):
         self.entity_description = description
         self._attr_unique_id = f"{coordinator.serial_number}-{description.key}"
 
+    @exception_handler
     async def async_press(self) -> None:
         """Press the button."""
         await self.entity_description.press_fn(self.coordinator.client)
homeassistant/components/airgradient/config_flow.py

@@ -1,5 +1,6 @@
 """Config flow for Airgradient."""
 
+from collections.abc import Mapping
 from typing import Any
 
 from airgradient import (
@@ -11,10 +12,15 @@ from airgradient import (
 from awesomeversion import AwesomeVersion
 import voluptuous as vol
 
-from homeassistant.components import zeroconf
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import (
+    SOURCE_RECONFIGURE,
+    SOURCE_USER,
+    ConfigFlow,
+    ConfigFlowResult,
+)
 from homeassistant.const import CONF_HOST, CONF_MODEL
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 
 from .const import DOMAIN
 
@@ -37,7 +43,7 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
         await self.client.set_configuration_control(ConfigurationControl.LOCAL)
 
     async def async_step_zeroconf(
-        self, discovery_info: zeroconf.ZeroconfServiceInfo
+        self, discovery_info: ZeroconfServiceInfo
     ) -> ConfigFlowResult:
         """Handle zeroconf discovery."""
         self.data[CONF_HOST] = host = discovery_info.host
@@ -95,10 +101,18 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
             await self.async_set_unique_id(
                 current_measures.serial_number, raise_on_progress=False
             )
-            self._abort_if_unique_id_configured()
+            if self.source == SOURCE_USER:
+                self._abort_if_unique_id_configured()
+            if self.source == SOURCE_RECONFIGURE:
+                self._abort_if_unique_id_mismatch()
             await self.set_configuration_source()
-            return self.async_create_entry(
-                title=current_measures.model,
+            if self.source == SOURCE_USER:
+                return self.async_create_entry(
+                    title=current_measures.model,
+                    data={CONF_HOST: user_input[CONF_HOST]},
+                )
+            return self.async_update_reload_and_abort(
+                self._get_reconfigure_entry(),
                 data={CONF_HOST: user_input[CONF_HOST]},
             )
         return self.async_show_form(
@@ -106,3 +120,9 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN):
             data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
             errors=errors,
         )
+
+    async def async_step_reconfigure(
+        self, user_input: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle reconfiguration."""
+        return await self.async_step_user()
homeassistant/components/airgradient/coordinator.py

@@ -55,7 +55,11 @@ class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]):
             measures = await self.client.get_current_measures()
             config = await self.client.get_config()
         except AirGradientError as error:
-            raise UpdateFailed(error) from error
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="update_error",
+                translation_placeholders={"error": str(error)},
+            ) from error
         if measures.firmware_version != self._current_version:
             device_registry = dr.async_get(self.hass)
             device_entry = device_registry.async_get_device(
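For readers unfamiliar with the translatable-exception pattern in the hunk above: instead of wrapping the raw error, the coordinator now raises UpdateFailed with a translation domain, key, and placeholders, and the frontend resolves the message from the integration's strings.json (those keys are added later in this commit). A rough, dependency-free sketch of the shape; this UpdateFailed class is a stand-in for homeassistant.helpers.update_coordinator.UpdateFailed, not the real one:

    DOMAIN = "airgradient"


    class UpdateFailed(Exception):
        """Stand-in mimicking the translation-aware HomeAssistantError base."""

        def __init__(
            self,
            *args,
            translation_domain=None,
            translation_key=None,
            translation_placeholders=None,
        ):
            super().__init__(*args)
            self.translation_domain = translation_domain
            self.translation_key = translation_key
            self.translation_placeholders = translation_placeholders


    try:
        raise ConnectionError("device unreachable")  # pretend the client call failed
    except ConnectionError as error:
        try:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_error",
                translation_placeholders={"error": str(error)},
            ) from error
        except UpdateFailed as failed:
            # The UI would look up component.airgradient.exceptions.update_error
            # and substitute {error}; here we just show what rides on the exception.
            print(failed.translation_key, failed.translation_placeholders)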
homeassistant/components/airgradient/entity.py

@@ -1,7 +1,11 @@
 """Base class for AirGradient entities."""
 
-from airgradient import get_model_name
+from collections.abc import Callable, Coroutine
+from typing import Any, Concatenate
+
+from airgradient import AirGradientConnectionError, AirGradientError, get_model_name
 
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
@@ -26,3 +30,31 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]):
             serial_number=coordinator.serial_number,
             sw_version=measures.firmware_version,
         )
+
+
+def exception_handler[_EntityT: AirGradientEntity, **_P](
+    func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
+) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
+    """Decorate AirGradient calls to handle exceptions.
+
+    A decorator that wraps the passed in function, catches AirGradient errors.
+    """
+
+    async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
+        try:
+            await func(self, *args, **kwargs)
+        except AirGradientConnectionError as error:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="communication_error",
+                translation_placeholders={"error": str(error)},
+            ) from error
+
+        except AirGradientError as error:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="unknown_error",
+                translation_placeholders={"error": str(error)},
+            ) from error
+
+    return handler
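To see how this new decorator is consumed by the button, number, select, and switch platforms in the hunks that follow, here is a cut-down, dependency-free sketch of the same wrap-and-translate pattern; both exception classes are stand-ins for the real airgradient and Home Assistant ones, and the message formatting is simplified:

    import asyncio
    from collections.abc import Callable, Coroutine
    from typing import Any


    class AirGradientError(Exception):
        """Stand-in for the airgradient library's base error."""


    class HomeAssistantError(Exception):
        """Stand-in for homeassistant.exceptions.HomeAssistantError."""


    def exception_handler[**_P](
        func: Callable[_P, Coroutine[Any, Any, Any]],
    ) -> Callable[_P, Coroutine[Any, Any, None]]:
        """Re-raise library errors as a user-facing HomeAssistantError."""

        async def handler(*args: _P.args, **kwargs: _P.kwargs) -> None:
            try:
                await func(*args, **kwargs)
            except AirGradientError as error:
                raise HomeAssistantError(f"unknown_error: {error}") from error

        return handler


    class Switch:
        @exception_handler
        async def async_turn_on(self) -> None:
            raise AirGradientError("boom")  # simulate a failing device call


    try:
        asyncio.run(Switch().async_turn_on())
    except HomeAssistantError as err:
        print(err)  # unknown_error: boom

The payoff is that every entity method decorated this way fails with a translated, user-facing error instead of leaking a raw library exception.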
homeassistant/components/airgradient/number.py

@@ -19,7 +19,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from . import AirGradientConfigEntry
 from .const import DOMAIN
 from .coordinator import AirGradientCoordinator
-from .entity import AirGradientEntity
+from .entity import AirGradientEntity, exception_handler
+
+PARALLEL_UPDATES = 1
 
 
 @dataclass(frozen=True, kw_only=True)
@@ -121,6 +123,7 @@ class AirGradientNumber(AirGradientEntity, NumberEntity):
         """Return the state of the number."""
         return self.entity_description.value_fn(self.coordinator.data.config)
 
+    @exception_handler
     async def async_set_native_value(self, value: float) -> None:
         """Set the selected value."""
         await self.entity_description.set_value_fn(self.coordinator.client, int(value))
homeassistant/components/airgradient/quality_scale.yaml

@@ -29,7 +29,7 @@ rules:
   unique-config-entry: done
 
   # Silver
-  action-exceptions: todo
+  action-exceptions: done
   config-entry-unloading: done
   docs-configuration-parameters:
     status: exempt
@@ -38,7 +38,7 @@ rules:
   entity-unavailable: done
   integration-owner: done
   log-when-unavailable: done
-  parallel-updates: todo
+  parallel-updates: done
   reauthentication-flow:
     status: exempt
     comment: |
@@ -68,9 +68,9 @@ rules:
   entity-device-class: done
   entity-disabled-by-default: done
   entity-translations: done
-  exception-translations: todo
+  exception-translations: done
   icon-translations: done
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues:
     status: exempt
     comment: |
homeassistant/components/airgradient/select.py

@@ -19,7 +19,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from . import AirGradientConfigEntry
 from .const import DOMAIN, PM_STANDARD, PM_STANDARD_REVERSE
 from .coordinator import AirGradientCoordinator
-from .entity import AirGradientEntity
+from .entity import AirGradientEntity, exception_handler
+
+PARALLEL_UPDATES = 1
 
 
 @dataclass(frozen=True, kw_only=True)
@@ -216,6 +218,7 @@ class AirGradientSelect(AirGradientEntity, SelectEntity):
         """Return the state of the select."""
         return self.entity_description.value_fn(self.coordinator.data.config)
 
+    @exception_handler
     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
         await self.entity_description.set_value_fn(self.coordinator.client, option)
homeassistant/components/airgradient/sensor.py

@@ -35,6 +35,8 @@ from .const import PM_STANDARD, PM_STANDARD_REVERSE
 from .coordinator import AirGradientCoordinator
 from .entity import AirGradientEntity
 
+PARALLEL_UPDATES = 0
+
 
 @dataclass(frozen=True, kw_only=True)
 class AirGradientMeasurementSensorEntityDescription(SensorEntityDescription):
@@ -137,6 +139,15 @@ MEASUREMENT_SENSOR_TYPES: tuple[AirGradientMeasurementSensorEntityDescription, .
         entity_registry_enabled_default=False,
         value_fn=lambda status: status.raw_total_volatile_organic_component,
     ),
+    AirGradientMeasurementSensorEntityDescription(
+        key="pm02_raw",
+        translation_key="raw_pm02",
+        device_class=SensorDeviceClass.PM25,
+        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
+        state_class=SensorStateClass.MEASUREMENT,
+        entity_registry_enabled_default=False,
+        value_fn=lambda status: status.raw_pm02,
+    ),
 )
 
 CONFIG_SENSOR_TYPES: tuple[AirGradientConfigSensorEntityDescription, ...] = (
@ -17,7 +17,9 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"invalid_version": "This firmware version is unsupported. Please upgrade the firmware of the device to at least version 3.1.1."
|
||||
"invalid_version": "This firmware version is unsupported. Please upgrade the firmware of the device to at least version 3.1.1.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
|
||||
"unique_id_mismatch": "Please ensure you reconfigure against the same device."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@ -119,6 +121,9 @@
|
||||
"raw_nitrogen": {
|
||||
"name": "Raw NOx"
|
||||
},
|
||||
"raw_pm02": {
|
||||
"name": "Raw PM2.5"
|
||||
},
|
||||
"display_pm_standard": {
|
||||
"name": "[%key:component::airgradient::entity::select::display_pm_standard::name%]",
|
||||
"state": {
|
||||
@ -162,5 +167,16 @@
|
||||
"name": "Post data to Airgradient"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"communication_error": {
|
||||
"message": "An error occurred while communicating with the Airgradient device: {error}"
|
||||
},
|
||||
"unknown_error": {
|
||||
"message": "An unknown error occurred while communicating with the Airgradient device: {error}"
|
||||
},
|
||||
"update_error": {
|
||||
"message": "An error occurred while communicating with the Airgradient device: {error}"
|
||||
}
|
||||
}
|
||||
}
|
||||
homeassistant/components/airgradient/switch.py

@@ -20,7 +20,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from . import AirGradientConfigEntry
 from .const import DOMAIN
 from .coordinator import AirGradientCoordinator
-from .entity import AirGradientEntity
+from .entity import AirGradientEntity, exception_handler
+
+PARALLEL_UPDATES = 1
 
 
 @dataclass(frozen=True, kw_only=True)
@@ -99,11 +101,13 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity):
         """Return the state of the switch."""
         return self.entity_description.value_fn(self.coordinator.data.config)
 
+    @exception_handler
     async def async_turn_on(self, **kwargs: Any) -> None:
         """Turn the switch on."""
         await self.entity_description.set_value_fn(self.coordinator.client, True)
         await self.coordinator.async_request_refresh()
 
+    @exception_handler
     async def async_turn_off(self, **kwargs: Any) -> None:
         """Turn the switch off."""
         await self.entity_description.set_value_fn(self.coordinator.client, False)
homeassistant/components/airgradient/update.py

@@ -2,7 +2,7 @@
 
 from datetime import timedelta
 
-from propcache import cached_property
+from propcache.api import cached_property
 
 from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
 from homeassistant.core import HomeAssistant
@@ -11,6 +11,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from . import AirGradientConfigEntry, AirGradientCoordinator
 from .entity import AirGradientEntity
 
+PARALLEL_UPDATES = 1
 SCAN_INTERVAL = timedelta(hours=1)
 
 
homeassistant/components/airnow/coordinator.py

@@ -21,7 +21,6 @@ from .const import (
     ATTR_API_CAT_DESCRIPTION,
     ATTR_API_CAT_LEVEL,
     ATTR_API_CATEGORY,
-    ATTR_API_PM25,
     ATTR_API_POLLUTANT,
     ATTR_API_REPORT_DATE,
     ATTR_API_REPORT_HOUR,
@@ -91,18 +90,16 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                 max_aqi_desc = obv[ATTR_API_CATEGORY][ATTR_API_CAT_DESCRIPTION]
                 max_aqi_poll = pollutant
 
-            # Copy other data from PM2.5 Value
-            if obv[ATTR_API_AQI_PARAM] == ATTR_API_PM25:
-                # Copy Report Details
-                data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE]
-                data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR]
-                data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ]
+            # Copy Report Details
+            data[ATTR_API_REPORT_DATE] = obv[ATTR_API_REPORT_DATE]
+            data[ATTR_API_REPORT_HOUR] = obv[ATTR_API_REPORT_HOUR]
+            data[ATTR_API_REPORT_TZ] = obv[ATTR_API_REPORT_TZ]
 
-                # Copy Station Details
-                data[ATTR_API_STATE] = obv[ATTR_API_STATE]
-                data[ATTR_API_STATION] = obv[ATTR_API_STATION]
-                data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE]
-                data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE]
+            # Copy Station Details
+            data[ATTR_API_STATE] = obv[ATTR_API_STATE]
+            data[ATTR_API_STATION] = obv[ATTR_API_STATION]
+            data[ATTR_API_STATION_LATITUDE] = obv[ATTR_API_STATION_LATITUDE]
+            data[ATTR_API_STATION_LONGITUDE] = obv[ATTR_API_STATION_LONGITUDE]
 
         # Store Overall AQI
         data[ATTR_API_AQI] = max_aqi
homeassistant/components/airthings/sensor.py

@@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="temp",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "battery": SensorEntityDescription(
         key="battery",
         device_class=SensorDeviceClass.BATTERY,
         native_unit_of_measurement=PERCENTAGE,
         entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "light": SensorEntityDescription(
         key="light",
         native_unit_of_measurement=PERCENTAGE,
         translation_key="light",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "virusRisk": SensorEntityDescription(
         key="virusRisk",
         translation_key="virus_risk",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "mold": SensorEntityDescription(
         key="mold",
         translation_key="mold",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "rssi": SensorEntityDescription(
         key="rssi",
@@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
         device_class=SensorDeviceClass.SIGNAL_STRENGTH,
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm1": SensorEntityDescription(
         key="pm1",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM1,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm25": SensorEntityDescription(
         key="pm25",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM25,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
 }
 
@@ -143,8 +155,7 @@ class AirthingsHeaterEnergySensor(
         self._id = airthings_device.device_id
         self._attr_device_info = DeviceInfo(
             configuration_url=(
-                "https://dashboard.airthings.com/devices/"
-                f"{airthings_device.device_id}"
+                f"https://dashboard.airthings.com/devices/{airthings_device.device_id}"
             ),
             identifiers={(DOMAIN, airthings_device.device_id)},
             name=airthings_device.name,

@ -67,18 +67,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
        device_class=SensorDeviceClass.TEMPERATURE,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    "humidity": SensorEntityDescription(
        key="humidity",
        device_class=SensorDeviceClass.HUMIDITY,
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    "pressure": SensorEntityDescription(
        key="pressure",
        device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
        native_unit_of_measurement=UnitOfPressure.MBAR,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=1,
    ),
    "battery": SensorEntityDescription(
        key="battery",
@ -86,24 +89,28 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
        suggested_display_precision=0,
    ),
    "co2": SensorEntityDescription(
        key="co2",
        device_class=SensorDeviceClass.CO2,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "voc": SensorEntityDescription(
        key="voc",
        device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
    "illuminance": SensorEntityDescription(
        key="illuminance",
        translation_key="illuminance",
        native_unit_of_measurement=PERCENTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        suggested_display_precision=0,
    ),
}

@ -50,7 +50,7 @@ SENSOR_DESCRIPTIONS = (
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda settings, status, measurements, history: int(
            history.get(
                f'Outdoor {"AQI(US)" if settings["is_aqi_usa"] else "AQI(CN)"}', -1
                f"Outdoor {'AQI(US)' if settings['is_aqi_usa'] else 'AQI(CN)'}", -1
            )
        ),
        translation_key="outdoor_air_quality_index",
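
The only change here is quote style: the formatter prefers a double-quoted outer f-string, so the embedded lookups flip to single quotes (the automation hunk further down gets the same treatment). Both spellings produce the same string:

# Equivalent f-strings; only the quote style differs.
settings = {"is_aqi_usa": True}
old = f'Outdoor {"AQI(US)" if settings["is_aqi_usa"] else "AQI(CN)"}'
new = f"Outdoor {'AQI(US)' if settings['is_aqi_usa'] else 'AQI(CN)'}"
assert old == new == "Outdoor AQI(US)"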

@ -5,7 +5,14 @@ from __future__ import annotations

import logging
from typing import Any

from aioairzone.const import AZD_MAC, AZD_WEBSERVER, DEFAULT_SYSTEM_ID
from aioairzone.const import (
    AZD_FIRMWARE,
    AZD_FULL_NAME,
    AZD_MAC,
    AZD_MODEL,
    AZD_WEBSERVER,
    DEFAULT_SYSTEM_ID,
)
from aioairzone.localapi import AirzoneLocalApi, ConnectionOptions

from homeassistant.config_entries import ConfigEntry
@ -17,6 +24,7 @@ from homeassistant.helpers import (
    entity_registry as er,
)

from .const import DOMAIN, MANUFACTURER
from .coordinator import AirzoneUpdateCoordinator

PLATFORMS: list[Platform] = [
@ -78,7 +86,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b
    options = ConnectionOptions(
        entry.data[CONF_HOST],
        entry.data[CONF_PORT],
        entry.data.get(CONF_ID, DEFAULT_SYSTEM_ID),
        entry.data[CONF_ID],
    )

    airzone = AirzoneLocalApi(aiohttp_client.async_get_clientsession(hass), options)
@ -88,6 +96,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b

    entry.runtime_data = coordinator

    device_registry = dr.async_get(hass)

    ws_data: dict[str, Any] | None = coordinator.data.get(AZD_WEBSERVER)
    if ws_data is not None:
        mac = ws_data.get(AZD_MAC, "")

        device_registry.async_get_or_create(
            config_entry_id=entry.entry_id,
            connections={(dr.CONNECTION_NETWORK_MAC, mac)},
            identifiers={(DOMAIN, f"{entry.entry_id}_ws")},
            manufacturer=MANUFACTURER,
            model=ws_data.get(AZD_MODEL),
            name=ws_data.get(AZD_FULL_NAME),
            sw_version=ws_data.get(AZD_FIRMWARE),
        )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True
@ -96,3 +120,25 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> b
async def async_unload_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_migrate_entry(hass: HomeAssistant, entry: AirzoneConfigEntry) -> bool:
    """Migrate an old entry."""
    if entry.version == 1 and entry.minor_version < 2:
        # Add missing CONF_ID
        system_id = entry.data.get(CONF_ID, DEFAULT_SYSTEM_ID)
        new_data = entry.data.copy()
        new_data[CONF_ID] = system_id
        hass.config_entries.async_update_entry(
            entry,
            data=new_data,
            minor_version=2,
        )

        _LOGGER.info(
            "Migration to configuration version %s.%s successful",
            entry.version,
            entry.minor_version,
        )

    return True

@ -10,12 +10,12 @@ from aioairzone.exceptions import AirzoneError, InvalidSystem
from aioairzone.localapi import AirzoneLocalApi, ConnectionOptions
import voluptuous as vol

from homeassistant.components import dhcp
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_ID, CONF_PORT
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo

from .const import DOMAIN

@ -44,6 +44,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):

    _discovered_ip: str | None = None
    _discovered_mac: str | None = None
    MINOR_VERSION = 2

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@ -53,6 +54,9 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
        errors = {}

        if user_input is not None:
            if CONF_ID not in user_input:
                user_input[CONF_ID] = DEFAULT_SYSTEM_ID

            self._async_abort_entries_match(user_input)

            airzone = AirzoneLocalApi(
@ -60,7 +64,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
                ConnectionOptions(
                    user_input[CONF_HOST],
                    user_input[CONF_PORT],
                    user_input.get(CONF_ID, DEFAULT_SYSTEM_ID),
                    user_input[CONF_ID],
                ),
            )

@ -84,6 +88,9 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
                )

            title = f"Airzone {user_input[CONF_HOST]}:{user_input[CONF_PORT]}"
            if user_input[CONF_ID] != DEFAULT_SYSTEM_ID:
                title += f" #{user_input[CONF_ID]}"

            return self.async_create_entry(title=title, data=user_input)

        return self.async_show_form(
@ -93,7 +100,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
        )

    async def async_step_dhcp(
        self, discovery_info: dhcp.DhcpServiceInfo
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Handle DHCP discovery."""
        self._discovered_ip = discovery_info.ip
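
A pattern that repeats through the rest of this commit: discovery payload types are imported from `homeassistant.helpers.service_info.*` instead of the integration modules (`homeassistant.components.dhcp`, `.ssdp`, `.zeroconf`), so a config flow no longer pulls in a whole component for one type annotation. The new-style imports, as used below in the androidtv_remote, apple_tv, awair, arcam_fmj and axis flows:

# New import locations for discovery type annotations.
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.ssdp import SsdpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo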

@ -68,8 +68,9 @@ class AirzoneSystemEntity(AirzoneEntity):
            model=self.get_airzone_value(AZD_MODEL),
            name=f"System {self.system_id}",
            sw_version=self.get_airzone_value(AZD_FIRMWARE),
            via_device=(DOMAIN, f"{entry.entry_id}_ws"),
        )
        if AZD_WEBSERVER in self.coordinator.data:
            self._attr_device_info["via_device"] = (DOMAIN, f"{entry.entry_id}_ws")
        self._attr_unique_id = entry.unique_id or entry.entry_id

    @property
@ -102,8 +103,9 @@ class AirzoneHotWaterEntity(AirzoneEntity):
            manufacturer=MANUFACTURER,
            model="DHW",
            name=self.get_airzone_value(AZD_NAME),
            via_device=(DOMAIN, f"{entry.entry_id}_ws"),
        )
        if AZD_WEBSERVER in self.coordinator.data:
            self._attr_device_info["via_device"] = (DOMAIN, f"{entry.entry_id}_ws")
        self._attr_unique_id = entry.unique_id or entry.entry_id

    def get_airzone_value(self, key: str) -> Any:

@ -11,5 +11,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone",
  "iot_class": "local_polling",
  "loggers": ["aioairzone"],
  "requirements": ["aioairzone==0.9.7"]
  "requirements": ["aioairzone==0.9.9"]
}

@ -7,7 +7,7 @@ from datetime import timedelta
import logging
from typing import TYPE_CHECKING, Any, Final, final

from propcache import cached_property
from propcache.api import cached_property
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
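
Several hunks in this commit (airgradient above, automation below) repeat this one-line move: `cached_property` comes from `propcache.api`, the package's public API module, rather than the package root. Usage is unchanged; a minimal sketch with an invented class:

from propcache.api import cached_property


class Radio:
    """Hypothetical example class."""

    @cached_property
    def firmware(self) -> str:
        # Computed on first access, then cached on the instance.
        return "1.0.0"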

@ -474,25 +474,30 @@ class ClimateCapabilities(AlexaEntity):
        # If we support two modes, one being off, we allow turning on too.
        supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
        if (
            self.entity.domain == climate.DOMAIN
            and climate.HVACMode.OFF
            in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
            or self.entity.domain == climate.DOMAIN
            and (
                supported_features
                & (
                    climate.ClimateEntityFeature.TURN_ON
                    | climate.ClimateEntityFeature.TURN_OFF
            (
                self.entity.domain == climate.DOMAIN
                and climate.HVACMode.OFF
                in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
            )
            or (
                self.entity.domain == climate.DOMAIN
                and (
                    supported_features
                    & (
                        climate.ClimateEntityFeature.TURN_ON
                        | climate.ClimateEntityFeature.TURN_OFF
                    )
                )
            )
            or self.entity.domain == water_heater.DOMAIN
            and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
            or (
                self.entity.domain == water_heater.DOMAIN
                and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
            )
        ):
            yield AlexaPowerController(self.entity)

        if (
            self.entity.domain == climate.DOMAIN
            or self.entity.domain == water_heater.DOMAIN
        if self.entity.domain == climate.DOMAIN or (
            self.entity.domain == water_heater.DOMAIN
            and (
                supported_features
                & water_heater.WaterHeaterEntityFeature.OPERATION_MODE
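
The rewrite adds explicit parentheses around each `domain and feature-check` pair. Since `and` already binds tighter than `or`, evaluation is unchanged; the grouping only makes the alternatives readable. The doorbell check in the next hunk gets the same reshaping. A quick reminder of the rule:

# "and" binds tighter than "or": a or b and c is a or (b and c).
a, b, c = False, True, False
assert (a or b and c) == (a or (b and c))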

@ -317,9 +317,8 @@ async def async_enable_proactive_mode(

        if should_doorbell:
            old_state = data["old_state"]
            if (
                new_state.domain == event.DOMAIN
                or new_state.state == STATE_ON
            if new_state.domain == event.DOMAIN or (
                new_state.state == STATE_ON
                and (old_state is None or old_state.state != STATE_ON)
            ):
                await async_send_doorbell_event_message(

@ -7,6 +7,6 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["python_homeassistant_analytics"],
  "requirements": ["python-homeassistant-analytics==0.8.0"],
  "requirements": ["python-homeassistant-analytics==0.8.1"],
  "single_config_entry": true
}

@ -21,7 +21,7 @@
      },
      "abort": {
        "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
        "invalid_unique_id": "Impossible to determine a valid unique id for the device"
        "invalid_unique_id": "Impossible to determine a valid unique ID for the device"
      }
    },
    "options": {
@ -38,17 +38,17 @@
        }
      },
      "apps": {
        "title": "Configure Android Apps",
        "description": "Configure application id {app_id}",
        "title": "Configure Android apps",
        "description": "Configure application ID {app_id}",
        "data": {
          "app_name": "Application Name",
          "app_name": "Application name",
          "app_id": "Application ID",
          "app_delete": "Check to delete this application"
        }
      },
      "rules": {
        "title": "Configure Android state detection rules",
        "description": "Configure detection rule for application id {rule_id}",
        "description": "Configure detection rule for application ID {rule_id}",
        "data": {
          "rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
          "rule_values": "List of state detection rules (see documentation)",

@ -14,7 +14,6 @@ from androidtvremote2 import (
)
import voluptuous as vol

from homeassistant.components import zeroconf
from homeassistant.config_entries import (
    SOURCE_REAUTH,
    ConfigEntry,
@ -31,6 +30,7 @@ from homeassistant.helpers.selector import (
    SelectSelectorConfig,
    SelectSelectorMode,
)
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import CONF_APP_ICON, CONF_APP_NAME, CONF_APPS, CONF_ENABLE_IME, DOMAIN
from .helpers import create_api, get_enable_ime
@ -142,7 +142,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
        )

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle zeroconf discovery."""
        _LOGGER.debug("Android TV device found via zeroconf: %s", discovery_info)
@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
        # and one of them, which could end up being in discovery_info.host, is from a
        # different device. If any of the discovery_info.ip_addresses matches the
        # existing host, don't update the host.
        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
        if (
            existing_config_entry
            # Ignored entries don't have host
            and CONF_HOST in existing_config_entry.data
            and len(discovery_info.ip_addresses) > 1
        ):
            existing_host = existing_config_entry.data[CONF_HOST]
            if existing_host != self.host:
                if existing_host in [

@ -44,12 +44,12 @@
      }
    },
    "apps": {
      "title": "Configure Android Apps",
      "description": "Configure application id {app_id}",
      "title": "Configure Android apps",
      "description": "Configure application ID {app_id}",
      "data": {
        "app_name": "Application Name",
        "app_name": "Application name",
        "app_id": "Application ID",
        "app_icon": "Application Icon",
        "app_icon": "Application icon",
        "app_delete": "Check to delete this application"
      }
    }

@ -27,7 +27,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import device_registry as dr, intent, llm, template
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import ulid
from homeassistant.util import ulid as ulid_util

from . import AnthropicConfigEntry
from .const import (
@ -164,7 +164,7 @@ class AnthropicConversationEntity(
        ]

        if user_input.conversation_id is None:
            conversation_id = ulid.ulid_now()
            conversation_id = ulid_util.ulid_now()
            messages = []

        elif user_input.conversation_id in self.history:
@ -177,8 +177,8 @@ class AnthropicConversationEntity(
            # a new conversation was started. If the user picks their own, they
            # want to track a conversation and we respect it.
            try:
                ulid.ulid_to_bytes(user_input.conversation_id)
                conversation_id = ulid.ulid_now()
                ulid_util.ulid_to_bytes(user_input.conversation_id)
                conversation_id = ulid_util.ulid_now()
            except ValueError:
                conversation_id = user_input.conversation_id

@ -8,5 +8,5 @@
  "documentation": "https://www.home-assistant.io/integrations/anthropic",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["anthropic==0.31.2"]
  "requirements": ["anthropic==0.44.0"]
}

@ -44,7 +44,10 @@ class APCUPSdData(dict[str, str]):
    @property
    def serial_no(self) -> str | None:
        """Return the unique serial number of the UPS, if available."""
        return self.get("SERIALNO")
        sn = self.get("SERIALNO")
        # We had user reports that some UPS models simply return "Blank" as serial number, in
        # which case we fall back to `None` to indicate that it is actually not available.
        return None if sn == "Blank" else sn


class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):

@ -34,6 +34,7 @@ from homeassistant.helpers.schema_config_entry_flow import (
    SchemaFlowFormStep,
    SchemaOptionsFlowHandler,
)
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import CONF_CREDENTIALS, CONF_IDENTIFIERS, CONF_START_OFF, DOMAIN

@ -98,7 +99,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
    VERSION = 1

    scan_filter: str | None = None
    all_identifiers: set[str]
    atv: BaseConfig | None = None
    atv_identifiers: list[str] | None = None
    _host: str  # host in zeroconf discovery info, should not be accessed by other flows
@ -118,6 +118,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
    def __init__(self) -> None:
        """Initialize a new AppleTVConfigFlow."""
        self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
        self.all_identifiers: set[str] = set()

    @property
    def device_identifier(self) -> str | None:
@ -204,7 +205,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
        )

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle device found via zeroconf."""
        if discovery_info.ip_address.version == 6:

@ -38,7 +38,7 @@ from homeassistant.loader import (
from homeassistant.util import slugify
from homeassistant.util.hass_dict import HassKey

__all__ = ["ClientCredential", "AuthorizationServer", "async_import_client_credential"]
__all__ = ["AuthorizationServer", "ClientCredential", "async_import_client_credential"]

_LOGGER = logging.getLogger(__name__)

@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
        """Wait for the client to be ready."""

        if not self.data or Attribute.MAC_ADDRESS not in self.data:
            await self.client.read_mac_address()

            data = await self.client.wait_for_response(
                FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
            )
@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):

                return False

        if not self.data or Attribute.NAME not in self.data:
            await self.client.wait_for_response(
                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
            )

        if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
            await self.client.read_thermostat_iaq_available()

            await self.client.wait_for_response(
                FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
            )
@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
            not self.data
            or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
        ):
            await self.client.read_sensors()

            await self.client.wait_for_response(
                FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
            )

        await self.client.read_thermostat_status()

        await self.client.read_iaq_status()

        await ready_callback(True)

        return True

@ -50,7 +50,7 @@ async def async_setup_entry(

    descriptions: list[AprilaireHumidifierDescription] = []

    if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (0, 1, 2):
    if coordinator.data.get(Attribute.HUMIDIFICATION_AVAILABLE) in (1, 2):
        descriptions.append(
            AprilaireHumidifierDescription(
                key="humidifier",
@ -67,7 +67,7 @@ async def async_setup_entry(
            )
        )

    if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) in (0, 1):
    if coordinator.data.get(Attribute.DEHUMIDIFICATION_AVAILABLE) == 1:
        descriptions.append(
            AprilaireHumidifierDescription(
                key="dehumidifier",

@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["pyaprilaire"],
  "requirements": ["pyaprilaire==0.7.4"]
  "requirements": ["pyaprilaire==0.7.7"]
}

@ -29,6 +29,8 @@ class ApSystemsSensorData:
class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
    """Coordinator used for all sensors."""

    device_version: str

    def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
        """Initialize my coordinator."""
        super().__init__(
@ -46,6 +48,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
            raise UpdateFailed from None
        self.api.max_power = device_info.maxPower
        self.api.min_power = device_info.minPower
        self.device_version = device_info.devVer

    async def _async_update_data(self) -> ApSystemsSensorData:
        try:

@ -21,7 +21,8 @@ class ApSystemsEntity(Entity):
        """Initialize the APsystems entity."""
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, data.device_id)},
            serial_number=data.device_id,
            manufacturer="APsystems",
            model="EZ1-M",
            serial_number=data.device_id,
            sw_version=data.coordinator.device_version.split(" ")[1],
        )
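
The new `sw_version` takes the second space-separated token of `devVer`. The exact payload format is not shown in this diff; assuming a value shaped like "EZ1-M 1.6.0" (hypothetical), the split yields:

dev_ver = "EZ1-M 1.6.0"  # hypothetical devVer value; real format not shown here
assert dev_ver.split(" ")[1] == "1.6.0"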

@ -19,5 +19,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aranet",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["aranet4==2.4.0"]
  "requirements": ["aranet4==2.5.0"]
}

@ -22,6 +22,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.const import (
    ATTR_MANUFACTURER,
    ATTR_MODEL,
    ATTR_NAME,
    ATTR_SW_VERSION,
    CONCENTRATION_PARTS_PER_MILLION,
@ -142,6 +143,7 @@ def _sensor_device_info_to_hass(
    if adv.readings and adv.readings.name:
        hass_device_info[ATTR_NAME] = adv.readings.name
        hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
        hass_device_info[ATTR_MODEL] = adv.readings.type.model
    if adv.manufacturer_data:
        hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
    return hass_device_info

@ -9,10 +9,10 @@ from arcam.fmj.client import Client, ConnectionFailed
from arcam.fmj.utils import get_uniqueid_from_host, get_uniqueid_from_udn
import voluptuous as vol

from homeassistant.components import ssdp
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.ssdp import ATTR_UPNP_UDN, SsdpServiceInfo

from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN

@ -88,12 +88,12 @@ class ArcamFmjFlowHandler(ConfigFlow, domain=DOMAIN):
        )

    async def async_step_ssdp(
        self, discovery_info: ssdp.SsdpServiceInfo
        self, discovery_info: SsdpServiceInfo
    ) -> ConfigFlowResult:
        """Handle a discovered device."""
        host = str(urlparse(discovery_info.ssdp_location).hostname)
        port = DEFAULT_PORT
        uuid = get_uniqueid_from_udn(discovery_info.upnp[ssdp.ATTR_UPNP_UDN])
        uuid = get_uniqueid_from_udn(discovery_info.upnp[ATTR_UPNP_UDN])
        if not uuid:
            return self.async_abort(reason="cannot_connect")

@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
        """Retrieve data from Aruba Access Point and return parsed result."""

        connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
        ssh = pexpect.spawn(connect)
        ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
        query = ssh.expect(
            [
                "password:",
@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
        ssh.expect("#")
        ssh.sendline("show clients")
        ssh.expect("#")
        devices_result = ssh.before.split(b"\r\n")
        devices_result = (ssh.before or "").splitlines()
        ssh.sendline("exit")

        devices: dict[str, dict[str, str]] = {}
        for device in devices_result:
            if match := _DEVICES_REGEX.search(device.decode("utf-8")):
            if match := _DEVICES_REGEX.search(device):
                devices[match.group("ip")] = {
                    "ip": match.group("ip"),
                    "mac": match.group("mac").upper(),

@ -6,5 +6,5 @@
  "iot_class": "local_polling",
  "loggers": ["pexpect", "ptyprocess"],
  "quality_scale": "legacy",
  "requirements": ["pexpect==4.6.0"]
  "requirements": ["pexpect==4.9.0"]
}

@ -46,24 +46,24 @@ from .websocket_api import async_register_websocket_api

__all__ = (
    "DOMAIN",
    "async_create_default_pipeline",
    "async_get_pipelines",
    "async_migrate_engine",
    "async_setup",
    "async_pipeline_from_audio_stream",
    "async_update_pipeline",
    "EVENT_RECORDING",
    "OPTION_PREFERRED",
    "SAMPLES_PER_CHUNK",
    "SAMPLE_CHANNELS",
    "SAMPLE_RATE",
    "SAMPLE_WIDTH",
    "AudioSettings",
    "Pipeline",
    "PipelineEvent",
    "PipelineEventType",
    "PipelineNotFound",
    "WakeWordSettings",
    "EVENT_RECORDING",
    "OPTION_PREFERRED",
    "SAMPLES_PER_CHUNK",
    "SAMPLE_RATE",
    "SAMPLE_WIDTH",
    "SAMPLE_CHANNELS",
    "async_create_default_pipeline",
    "async_get_pipelines",
    "async_migrate_engine",
    "async_pipeline_from_audio_stream",
    "async_setup",
    "async_update_pipeline",
)

CONFIG_SCHEMA = vol.Schema(
@ -108,6 +108,7 @@ async def async_pipeline_from_audio_stream(
    device_id: str | None = None,
    start_stage: PipelineStage = PipelineStage.STT,
    end_stage: PipelineStage = PipelineStage.TTS,
    conversation_extra_system_prompt: str | None = None,
) -> None:
    """Create an audio pipeline from an audio stream.

@ -119,6 +120,7 @@ async def async_pipeline_from_audio_stream(
        stt_metadata=stt_metadata,
        stt_stream=stt_stream,
        wake_word_phrase=wake_word_phrase,
        conversation_extra_system_prompt=conversation_extra_system_prompt,
        run=PipelineRun(
            hass,
            context=context,

@ -50,6 +50,7 @@ from homeassistant.util import (
    language as language_util,
    ulid as ulid_util,
)
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
@ -91,6 +92,8 @@ ENGINE_LANGUAGE_PAIRS = (
    ("tts_engine", "tts_language"),
)

KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)


def validate_language(data: dict[str, Any]) -> Any:
    """Validate language settings."""

@ -248,7 +251,7 @@ async def async_create_default_pipeline(
    The default pipeline will use the homeassistant conversation agent and the
    specified stt / tts engines.
    """
    pipeline_data: PipelineData = hass.data[DOMAIN]
    pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
    pipeline_store = pipeline_data.pipeline_store
    pipeline_settings = _async_resolve_default_pipeline_settings(
        hass,
@ -283,7 +286,7 @@ def _async_get_pipeline_from_conversation_entity(
@callback
def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> Pipeline:
    """Get a pipeline by id or the preferred pipeline."""
    pipeline_data: PipelineData = hass.data[DOMAIN]
    pipeline_data = hass.data[KEY_ASSIST_PIPELINE]

    if pipeline_id is None:
        # A pipeline was not specified, use the preferred one
@ -306,7 +309,7 @@ def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> P
@callback
def async_get_pipelines(hass: HomeAssistant) -> list[Pipeline]:
    """Get all pipelines."""
    pipeline_data: PipelineData = hass.data[DOMAIN]
    pipeline_data = hass.data[KEY_ASSIST_PIPELINE]

    return list(pipeline_data.pipeline_store.data.values())

@ -329,7 +332,7 @@ async def async_update_pipeline(
    prefer_local_intents: bool | UndefinedType = UNDEFINED,
) -> None:
    """Update a pipeline."""
    pipeline_data: PipelineData = hass.data[DOMAIN]
    pipeline_data = hass.data[KEY_ASSIST_PIPELINE]

    updates: dict[str, Any] = pipeline.to_json()
    updates.pop("id")
@ -587,7 +590,7 @@ class PipelineRun:
        ):
            raise InvalidPipelineStagesError(self.start_stage, self.end_stage)

        pipeline_data: PipelineData = self.hass.data[DOMAIN]
        pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
        if self.pipeline.id not in pipeline_data.pipeline_debug:
            pipeline_data.pipeline_debug[self.pipeline.id] = LimitedSizeDict(
                size_limit=STORED_PIPELINE_RUNS
@ -615,7 +618,7 @@ class PipelineRun:
    def process_event(self, event: PipelineEvent) -> None:
        """Log an event and call listener."""
        self.event_callback(event)
        pipeline_data: PipelineData = self.hass.data[DOMAIN]
        pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
        if self.id not in pipeline_data.pipeline_debug[self.pipeline.id]:
            # This run has been evicted from the logged pipeline runs already
            return
@ -650,7 +653,7 @@ class PipelineRun:
            )
        )

        pipeline_data: PipelineData = self.hass.data[DOMAIN]
        pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
        pipeline_data.pipeline_runs.remove_run(self)

    async def prepare_wake_word_detection(self) -> None:
@ -1010,16 +1013,29 @@ class PipelineRun:
        self.intent_agent = agent_info.id

    async def recognize_intent(
        self, intent_input: str, conversation_id: str | None, device_id: str | None
        self,
        intent_input: str,
        conversation_id: str | None,
        device_id: str | None,
        conversation_extra_system_prompt: str | None,
    ) -> str:
        """Run intent recognition portion of pipeline. Returns text to speak."""
        if self.intent_agent is None:
            raise RuntimeError("Recognize intent was not prepared")

        if self.pipeline.conversation_language == MATCH_ALL:
            # LLMs support all languages ('*') so use pipeline language for
            # intent fallback.
            input_language = self.pipeline.language
            # LLMs support all languages ('*') so use languages from the
            # pipeline for intent fallback.
            #
            # We prioritize the STT and TTS languages because they may be more
            # specific, such as "zh-CN" instead of just "zh". This is necessary
            # for languages whose intents are split out by region when
            # preferring local intent matching.
            input_language = (
                self.pipeline.stt_language
                or self.pipeline.tts_language
                or self.pipeline.language
            )
        else:
            input_language = self.pipeline.conversation_language

@ -1045,10 +1061,12 @@ class PipelineRun:
                device_id=device_id,
                language=input_language,
                agent_id=self.intent_agent,
                extra_system_prompt=conversation_extra_system_prompt,
            )
            processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT

            conversation_result: conversation.ConversationResult | None = None
            agent_id = user_input.agent_id
            intent_response: intent.IntentResponse | None = None
            if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
                # Sentence triggers override conversation agent
                if (
@ -1058,14 +1076,12 @@ class PipelineRun:
                    )
                ) is not None:
                    # Sentence trigger matched
                    trigger_response = intent.IntentResponse(
                    agent_id = "sentence_trigger"
                    intent_response = intent.IntentResponse(
                        self.pipeline.conversation_language
                    )
                    trigger_response.async_set_speech(trigger_response_text)
                    conversation_result = conversation.ConversationResult(
                        response=trigger_response,
                        conversation_id=user_input.conversation_id,
                    )
                    intent_response.async_set_speech(trigger_response_text)

                # Try local intents first, if preferred.
                elif self.pipeline.prefer_local_intents and (
                    intent_response := await conversation.async_handle_intents(
@ -1073,13 +1089,31 @@ class PipelineRun:
                    )
                ):
                    # Local intent matched
                    conversation_result = conversation.ConversationResult(
                        response=intent_response,
                        conversation_id=user_input.conversation_id,
                    )
                    agent_id = conversation.HOME_ASSISTANT_AGENT
                    processed_locally = True

            if conversation_result is None:
            # It was already handled, create response and add to chat history
            if intent_response is not None:
                async with conversation.async_get_chat_session(
                    self.hass, user_input
                ) as chat_session:
                    speech: str = intent_response.speech.get("plain", {}).get(
                        "speech", ""
                    )
                    chat_session.async_add_message(
                        conversation.ChatMessage(
                            role="assistant",
                            agent_id=agent_id,
                            content=speech,
                            native=intent_response,
                        )
                    )
                    conversation_result = conversation.ConversationResult(
                        response=intent_response,
                        conversation_id=chat_session.conversation_id,
                    )

            else:
                # Fall back to pipeline conversation agent
                conversation_result = await conversation.async_converse(
                    hass=self.hass,
@ -1090,6 +1124,10 @@ class PipelineRun:
                    language=user_input.language,
                    agent_id=user_input.agent_id,
                )
                speech = conversation_result.response.speech.get("plain", {}).get(
                    "speech", ""
                )

        except Exception as src_error:
            _LOGGER.exception("Unexpected error during intent recognition")
            raise IntentRecognitionError(
@ -1109,10 +1147,6 @@ class PipelineRun:
            )
        )

        speech: str = conversation_result.response.speech.get("plain", {}).get(
            "speech", ""
        )

        return speech

    async def prepare_text_to_speech(self) -> None:
@ -1213,7 +1247,7 @@ class PipelineRun:
            return

        # Forward to device audio capture
        pipeline_data: PipelineData = self.hass.data[DOMAIN]
        pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
        audio_queue = pipeline_data.device_audio_queues.get(self._device_id)
        if audio_queue is None:
            return
@ -1392,8 +1426,13 @@ class PipelineInput:
    """Input for text-to-speech. Required when start_stage = tts."""

    conversation_id: str | None = None
    """Identifier for the conversation."""

    conversation_extra_system_prompt: str | None = None
    """Extra prompt information for the conversation agent."""

    device_id: str | None = None
    """Identifier of the device that is processing the input/output of the pipeline."""

    async def execute(self) -> None:
        """Run pipeline."""
@ -1453,9 +1492,9 @@ class PipelineInput:
            if stt_audio_buffer:
                # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
                # This is basically an async itertools.chain.
                async def buffer_then_audio_stream() -> (
                    AsyncGenerator[EnhancedAudioChunk]
                ):
                async def buffer_then_audio_stream() -> AsyncGenerator[
                    EnhancedAudioChunk
                ]:
                    # Buffered audio
                    for chunk in stt_audio_buffer:
                        yield chunk
@ -1483,6 +1522,7 @@ class PipelineInput:
                    intent_input,
                    self.conversation_id,
                    self.device_id,
                    self.conversation_extra_system_prompt,
                )
                if tts_input.strip():
                    current_stage = PipelineStage.TTS
@ -1864,7 +1904,7 @@ class PipelineStore(Store[SerializedPipelineStorageCollection]):
        return old_data


@singleton(DOMAIN)
@singleton(KEY_ASSIST_PIPELINE, async_=True)
async def async_setup_pipeline_store(hass: HomeAssistant) -> PipelineData:
    """Set up the pipeline storage collection."""
    pipeline_store = PipelineStorageCollection(
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import collection, entity_registry as er, restore_state
|
||||
|
||||
from .const import DOMAIN, OPTION_PREFERRED
|
||||
from .pipeline import AssistDevice, PipelineData, PipelineStorageCollection
|
||||
from .const import OPTION_PREFERRED
|
||||
from .pipeline import KEY_ASSIST_PIPELINE, AssistDevice
|
||||
from .vad import VadSensitivity
|
||||
|
||||
|
||||
@ -30,7 +30,7 @@ def get_chosen_pipeline(
|
||||
if state is None or state.state == OPTION_PREFERRED:
|
||||
return None
|
||||
|
||||
pipeline_store: PipelineStorageCollection = hass.data[DOMAIN].pipeline_store
|
||||
pipeline_store = hass.data[KEY_ASSIST_PIPELINE].pipeline_store
|
||||
return next(
|
||||
(item.id for item in pipeline_store.async_items() if item.name == state.state),
|
||||
None,
|
||||
@ -80,7 +80,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
"""When entity is added to Home Assistant."""
|
||||
await super().async_added_to_hass()
|
||||
|
||||
pipeline_data: PipelineData = self.hass.data[DOMAIN]
|
||||
pipeline_data = self.hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_store = pipeline_data.pipeline_store
|
||||
self.async_on_remove(
|
||||
pipeline_store.async_add_change_set_listener(self._pipelines_updated)
|
||||
@ -116,9 +116,7 @@ class AssistPipelineSelect(SelectEntity, restore_state.RestoreEntity):
|
||||
@callback
|
||||
def _update_options(self) -> None:
|
||||
"""Handle pipeline update."""
|
||||
pipeline_store: PipelineStorageCollection = self.hass.data[
|
||||
DOMAIN
|
||||
].pipeline_store
|
||||
pipeline_store = self.hass.data[KEY_ASSIST_PIPELINE].pipeline_store
|
||||
options = [OPTION_PREFERRED]
|
||||
options.extend(sorted(item.name for item in pipeline_store.async_items()))
|
||||
self._attr_options = options
|
||||
|
@ -75,7 +75,7 @@ class AudioBuffer:
|
||||
class VoiceCommandSegmenter:
|
||||
"""Segments an audio stream into voice commands."""
|
||||
|
||||
speech_seconds: float = 0.1
|
||||
speech_seconds: float = 0.3
|
||||
"""Seconds of speech before voice command has started."""
|
||||
|
||||
command_seconds: float = 1.0
|
||||
|
@ -1,9 +1,6 @@
|
||||
"""Assist pipeline Websocket API."""
|
||||
|
||||
import asyncio
|
||||
|
||||
# Suppressing disable=deprecated-module is needed for Python 3.11
|
||||
import audioop # pylint: disable=deprecated-module
|
||||
import base64
|
||||
from collections.abc import AsyncGenerator, Callable
|
||||
import contextlib
|
||||
@ -11,6 +8,7 @@ import logging
|
||||
import math
|
||||
from typing import Any, Final
|
||||
|
||||
import audioop # pylint: disable=deprecated-module
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import conversation, stt, tts, websocket_api
|
||||
@ -22,7 +20,6 @@ from homeassistant.util import language as language_util
|
||||
from .const import (
|
||||
DEFAULT_PIPELINE_TIMEOUT,
|
||||
DEFAULT_WAKE_WORD_TIMEOUT,
|
||||
DOMAIN,
|
||||
EVENT_RECORDING,
|
||||
SAMPLE_CHANNELS,
|
||||
SAMPLE_RATE,
|
||||
@ -30,9 +27,9 @@ from .const import (
|
||||
)
|
||||
from .error import PipelineNotFound
|
||||
from .pipeline import (
|
||||
KEY_ASSIST_PIPELINE,
|
||||
AudioSettings,
|
||||
DeviceAudioQueue,
|
||||
PipelineData,
|
||||
PipelineError,
|
||||
PipelineEvent,
|
||||
PipelineEventType,
|
||||
@ -284,7 +281,7 @@ def websocket_list_runs(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""List pipeline runs for which debug data is available."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_id = msg["pipeline_id"]
|
||||
|
||||
if pipeline_id not in pipeline_data.pipeline_debug:
|
||||
@ -320,7 +317,7 @@ def websocket_list_devices(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""List assist devices."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
ent_reg = er.async_get(hass)
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
@ -351,7 +348,7 @@ def websocket_get_run(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get debug data for a pipeline run."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
pipeline_id = msg["pipeline_id"]
|
||||
pipeline_run_id = msg["pipeline_run_id"]
|
||||
|
||||
@ -456,7 +453,7 @@ async def websocket_device_capture(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Capture raw audio from a satellite device and forward to client."""
|
||||
pipeline_data: PipelineData = hass.data[DOMAIN]
|
||||
pipeline_data = hass.data[KEY_ASSIST_PIPELINE]
|
||||
device_id = msg["device_id"]
|
||||
|
||||
# Number of seconds to record audio in wall clock time
|
||||
|

@ -30,8 +30,8 @@ from .websocket_api import async_register_websocket_api
__all__ = [
    "DOMAIN",
    "AssistSatelliteAnnouncement",
    "AssistSatelliteEntity",
    "AssistSatelliteConfiguration",
    "AssistSatelliteEntity",
    "AssistSatelliteEntityDescription",
    "AssistSatelliteEntityFeature",
    "AssistSatelliteWakeWord",

@ -96,7 +96,11 @@ class AssistSatelliteAnnouncement:
    media_id: str
    """Media ID to be played."""

    original_media_id: str
    """The raw media ID before processing."""

    media_id_source: Literal["url", "media_id", "tts"]
    """Source of the media ID."""


class AssistSatelliteEntity(entity.Entity):
@ -187,47 +191,10 @@ class AssistSatelliteEntity(entity.Entity):
        """
        await self._cancel_running_pipeline()

        media_id_source: Literal["url", "media_id", "tts"] | None = None

        if message is None:
            message = ""

        if not media_id:
            media_id_source = "tts"
            # Synthesize audio and get URL
            pipeline_id = self._resolve_pipeline()
            pipeline = async_get_pipeline(self.hass, pipeline_id)

            tts_options: dict[str, Any] = {}
            if pipeline.tts_voice is not None:
                tts_options[tts.ATTR_VOICE] = pipeline.tts_voice

            if self.tts_options is not None:
                tts_options.update(self.tts_options)

            media_id = tts_generate_media_source_id(
                self.hass,
                message,
                engine=pipeline.tts_engine,
                language=pipeline.tts_language,
                options=tts_options,
            )

        if media_source.is_media_source_id(media_id):
            if not media_id_source:
                media_id_source = "media_id"
            media = await media_source.async_resolve_media(
                self.hass,
                media_id,
                None,
            )
            media_id = media.url

        if not media_id_source:
            media_id_source = "url"

        # Resolve to full URL
        media_id = async_process_play_media_url(self.hass, media_id)
        announcement = await self._resolve_announcement_media_id(message, media_id)

        if self._is_announcing:
            raise SatelliteBusyError
@ -237,9 +204,7 @@ class AssistSatelliteEntity(entity.Entity):

        try:
            # Block until announcement is finished
            await self.async_announce(
                AssistSatelliteAnnouncement(message, media_id, media_id_source)
            )
            await self.async_announce(announcement)
        finally:
            self._is_announcing = False
            self._set_state(AssistSatelliteState.IDLE)
@ -428,3 +393,54 @@ class AssistSatelliteEntity(entity.Entity):
            vad_sensitivity = vad.VadSensitivity(vad_sensitivity_state.state)

        return vad.VadSensitivity.to_seconds(vad_sensitivity)

    async def _resolve_announcement_media_id(
        self, message: str, media_id: str | None
    ) -> AssistSatelliteAnnouncement:
        """Resolve the media ID."""
        media_id_source: Literal["url", "media_id", "tts"] | None = None

        if media_id:
            original_media_id = media_id

        else:
            media_id_source = "tts"
            # Synthesize audio and get URL
            pipeline_id = self._resolve_pipeline()
            pipeline = async_get_pipeline(self.hass, pipeline_id)

            tts_options: dict[str, Any] = {}
            if pipeline.tts_voice is not None:
                tts_options[tts.ATTR_VOICE] = pipeline.tts_voice

            if self.tts_options is not None:
                tts_options.update(self.tts_options)

            media_id = tts_generate_media_source_id(
                self.hass,
                message,
                engine=pipeline.tts_engine,
                language=pipeline.tts_language,
                options=tts_options,
            )
            original_media_id = media_id

        if media_source.is_media_source_id(media_id):
            if not media_id_source:
                media_id_source = "media_id"
            media = await media_source.async_resolve_media(
                self.hass,
                media_id,
                None,
            )
            media_id = media.url

        if not media_id_source:
            media_id_source = "url"

        # Resolve to full URL
        media_id = async_process_play_media_url(self.hass, media_id)

        return AssistSatelliteAnnouncement(
            message, media_id, original_media_id, media_id_source
        )
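
With resolution factored into `_resolve_announcement_media_id`, the announce path accepts either a TTS `message` or a direct `media_id`. A hedged sketch of driving it through the `assist_satellite.announce` service, mirroring the call the new intent below makes (the entity ID is a placeholder):

# Placeholder entity_id; mirrors the service call in intent.py below.
await hass.services.async_call(
    "assist_satellite",
    "announce",
    {"message": "Dinner is ready"},
    blocking=True,
    target={"entity_id": "assist_satellite.kitchen"},
)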

69
homeassistant/components/assist_satellite/intent.py
Normal file
@ -0,0 +1,69 @@
"""Assist Satellite intents."""

import voluptuous as vol

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er, intent

from .const import DOMAIN, AssistSatelliteEntityFeature


async def async_setup_intents(hass: HomeAssistant) -> None:
    """Set up the intents."""
    intent.async_register(hass, BroadcastIntentHandler())


class BroadcastIntentHandler(intent.IntentHandler):
    """Broadcast a message."""

    intent_type = intent.INTENT_BROADCAST
    description = "Broadcast a message through the home"

    @property
    def slot_schema(self) -> dict | None:
        """Return a slot schema."""
        return {vol.Required("message"): str}

    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
        """Broadcast a message."""
        hass = intent_obj.hass
        ent_reg = er.async_get(hass)

        # Find all assist satellite entities that are not the one invoking the intent
        entities = {
            entity: entry
            for entity in hass.states.async_entity_ids(DOMAIN)
            if (entry := ent_reg.async_get(entity))
            and entry.supported_features & AssistSatelliteEntityFeature.ANNOUNCE
        }

        if intent_obj.device_id:
            entities = {
                entity: entry
                for entity, entry in entities.items()
                if entry.device_id != intent_obj.device_id
            }

        await hass.services.async_call(
            DOMAIN,
            "announce",
            {"message": intent_obj.slots["message"]["value"]},
            blocking=True,
            context=intent_obj.context,
            target={"entity_id": list(entities)},
        )

        response = intent_obj.create_response()
        response.async_set_speech("Done")
        response.response_type = intent.IntentResponseType.ACTION_DONE
        response.async_set_results(
            success_results=[
                intent.IntentResponseTarget(
                    type=intent.IntentResponseTargetType.ENTITY,
                    id=entity,
                    name=state.name if (state := hass.states.get(entity)) else entity,
                )
                for entity in entities
            ]
        )
        return response

@ -31,8 +31,8 @@
        "unknown": "[%key:common::config_flow::error::unknown%]"
      },
      "abort": {
        "invalid_unique_id": "Impossible to determine a valid unique id for the device",
        "no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
        "invalid_unique_id": "Impossible to determine a valid unique ID for the device",
        "no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
      }
    },
    "options": {
@ -42,7 +42,7 @@
        "consider_home": "Seconds to wait before considering a device away",
        "track_unknown": "Track unknown / unnamed devices",
        "interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
        "dnsmasq": "The location in the router of the dnsmasq.leases files",
        "dnsmasq": "The location of the dnsmasq.leases file in the router",
        "require_ip": "Devices must have IP (for access point mode)"
      }
    }

@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
  "iot_class": "cloud_polling",
  "loggers": ["aussiebb"],
  "requirements": ["pyaussiebb==0.1.4"]
  "requirements": ["pyaussiebb==0.1.5"]
}

@ -9,7 +9,7 @@ from dataclasses import dataclass
import logging
from typing import Any, Protocol, cast

from propcache import cached_property
from propcache.api import cached_property
import voluptuous as vol

from homeassistant.components import websocket_api
@ -636,9 +636,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
        alias = ""
        if "trigger" in run_variables:
            if "description" in run_variables["trigger"]:
                reason = f' by {run_variables["trigger"]["description"]}'
                reason = f" by {run_variables['trigger']['description']}"
            if "alias" in run_variables["trigger"]:
                alias = f' trigger \'{run_variables["trigger"]["alias"]}\''
                alias = f" trigger '{run_variables['trigger']['alias']}'"
        self._logger.debug("Automation%s triggered%s", alias, reason)

        # Create a new context referring to the old context.

@ -11,11 +11,12 @@ from python_awair.exceptions import AuthError, AwairError
from python_awair.user import AwairUser
import voluptuous as vol

from homeassistant.components import onboarding, zeroconf
from homeassistant.components import onboarding
from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DEVICE, CONF_HOST
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN, LOGGER

@ -29,7 +30,7 @@ class AwairFlowHandler(ConfigFlow, domain=DOMAIN):
    host: str

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle zeroconf discovery."""

@ -10,7 +10,6 @@ from urllib.parse import urlsplit

import voluptuous as vol

from homeassistant.components import dhcp, ssdp, zeroconf
from homeassistant.config_entries import (
    SOURCE_IGNORE,
    SOURCE_REAUTH,
@ -32,6 +31,14 @@ from homeassistant.const import (
)
from homeassistant.core import callback
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.ssdp import (
    ATTR_UPNP_FRIENDLY_NAME,
    ATTR_UPNP_PRESENTATION_URL,
    ATTR_UPNP_SERIAL,
    SsdpServiceInfo,
)
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.helpers.typing import VolDictType
from homeassistant.util.network import is_link_local

@ -190,7 +197,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
            return await self.async_step_user()

    async def async_step_dhcp(
        self, discovery_info: dhcp.DhcpServiceInfo
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Prepare configuration for a DHCP discovered Axis device."""
        return await self._process_discovered_device(
@ -203,21 +210,21 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
        )

    async def async_step_ssdp(
        self, discovery_info: ssdp.SsdpServiceInfo
        self, discovery_info: SsdpServiceInfo
    ) -> ConfigFlowResult:
        """Prepare configuration for a SSDP discovered Axis device."""
        url = urlsplit(discovery_info.upnp[ssdp.ATTR_UPNP_PRESENTATION_URL])
        url = urlsplit(discovery_info.upnp[ATTR_UPNP_PRESENTATION_URL])
        return await self._process_discovered_device(
            {
                CONF_HOST: url.hostname,
                CONF_MAC: format_mac(discovery_info.upnp[ssdp.ATTR_UPNP_SERIAL]),
                CONF_NAME: f"{discovery_info.upnp[ssdp.ATTR_UPNP_FRIENDLY_NAME]}",
                CONF_MAC: format_mac(discovery_info.upnp[ATTR_UPNP_SERIAL]),
                CONF_NAME: f"{discovery_info.upnp[ATTR_UPNP_FRIENDLY_NAME]}",
                CONF_PORT: url.port,
            }
        )

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Prepare configuration for a Zeroconf discovered Axis device."""
        return await self._process_discovered_device(
@ -2,10 +2,10 @@
"config": {
"step": {
"user": {
"title": "Setup your Azure Data Explorer integration",
"title": "Set up Azure Data Explorer",
"description": "Enter connection details",
"data": {
"cluster_ingest_uri": "Cluster Ingest URI",
"cluster_ingest_uri": "Cluster ingestion URI",
"authority_id": "Authority ID",
"client_id": "Client ID",
"client_secret": "Client secret",
@ -14,7 +14,7 @@
"use_queued_ingestion": "Use queued ingestion"
},
"data_description": {
"cluster_ingest_uri": "Ingest-URI of the cluster",
"cluster_ingest_uri": "Ingestion URI of the cluster",
"use_queued_ingestion": "Must be enabled when using ADX free cluster"
}
}

@ -2,26 +2,26 @@
"config": {
"step": {
"user": {
"title": "Set up your Azure Event Hub integration",
"title": "Set up Azure Event Hub",
"data": {
"event_hub_instance_name": "Event Hub Instance Name",
"use_connection_string": "Use Connection String"
"event_hub_instance_name": "Event Hub instance name",
"use_connection_string": "Use connection string"
}
},
"conn_string": {
"title": "Connection String method",
"title": "Connection string method",
"description": "Please enter the connection string for: {event_hub_instance_name}",
"data": {
"event_hub_connection_string": "Event Hub Connection String"
"event_hub_connection_string": "Event Hub connection string"
}
},
"sas": {
"title": "SAS Credentials method",
"title": "SAS credentials method",
"description": "Please enter the SAS (shared access signature) credentials for: {event_hub_instance_name}",
"data": {
"event_hub_namespace": "Event Hub Namespace",
"event_hub_sas_policy": "Event Hub SAS Policy",
"event_hub_sas_key": "Event Hub SAS Key"
"event_hub_namespace": "Event Hub namespace",
"event_hub_sas_policy": "Event Hub SAS policy",
"event_hub_sas_key": "Event Hub SAS key"
}
}
},
@ -38,7 +38,7 @@
"options": {
"step": {
"init": {
"title": "Options for the Azure Event Hub.",
"title": "Options for Azure Event Hub.",
"data": {
"send_interval": "Interval between sending batches to the hub."
}

@ -21,10 +21,13 @@ from .manager import (
BackupManager,
BackupPlatformProtocol,
BackupReaderWriter,
BackupReaderWriterError,
CoreBackupReaderWriter,
CreateBackupEvent,
IncorrectPasswordError,
ManagerBackup,
NewBackup,
RestoreBackupEvent,
WrittenBackup,
)
from .models import AddonInfo, AgentBackup, Folder
@ -33,16 +36,19 @@ from .websocket import async_register_websocket_handlers
__all__ = [
"AddonInfo",
"AgentBackup",
"ManagerBackup",
"BackupAgent",
"BackupAgentError",
"BackupAgentPlatformProtocol",
"BackupPlatformProtocol",
"BackupReaderWriter",
"BackupReaderWriterError",
"CreateBackupEvent",
"Folder",
"IncorrectPasswordError",
"LocalBackupAgent",
"ManagerBackup",
"NewBackup",
"RestoreBackupEvent",
"WrittenBackup",
]

@ -82,8 +88,26 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
password=None,
)

async def async_handle_create_automatic_service(call: ServiceCall) -> None:
"""Service handler for creating automatic backups."""
config_data = backup_manager.config.data
await backup_manager.async_create_backup(
agent_ids=config_data.create_backup.agent_ids,
include_addons=config_data.create_backup.include_addons,
include_all_addons=config_data.create_backup.include_all_addons,
include_database=config_data.create_backup.include_database,
include_folders=config_data.create_backup.include_folders,
include_homeassistant=True, # always include HA
name=config_data.create_backup.name,
password=config_data.create_backup.password,
with_automatic_settings=True,
)

if not with_hassio:
hass.services.async_register(DOMAIN, "create", async_handle_create_service)
hass.services.async_register(
DOMAIN, "create_automatic", async_handle_create_automatic_service
)

async_register_http_views(hass)

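For orientation, a minimal sketch of how the service registered above could be invoked from custom code; the domain and service names are taken from this diff, while the helper function itself is hypothetical:

    from homeassistant.core import HomeAssistant

    async def trigger_automatic_backup(hass: HomeAssistant) -> None:
        # Hypothetical helper: fires the "backup.create_automatic" service
        # registered above, which reuses the stored automatic-backup settings.
        await hass.services.async_call("backup", "create_automatic", {}, blocking=True)
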
@ -7,7 +7,7 @@ from collections.abc import AsyncIterator, Callable, Coroutine
from pathlib import Path
from typing import Any, Protocol

from propcache import cached_property
from propcache.api import cached_property

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

@ -5,8 +5,10 @@ from __future__ import annotations
import asyncio
from collections.abc import Callable
from dataclasses import dataclass, field, replace
import datetime as dt
from datetime import datetime, timedelta
from enum import StrEnum
import random
from typing import TYPE_CHECKING, Self, TypedDict

from cronsim import CronSim
@ -17,16 +19,22 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

from .const import LOGGER
from .models import Folder
from .models import BackupManagerError, Folder

if TYPE_CHECKING:
from .manager import BackupManager, ManagerBackup

# The time of the automatic backup event should be compatible with
# the time of the recorder's nightly job which runs at 04:12.
# Run the backup at 04:45.
CRON_PATTERN_DAILY = "45 4 * * *"
CRON_PATTERN_WEEKLY = "45 4 * * {}"
CRON_PATTERN_DAILY = "{m} {h} * * *"
CRON_PATTERN_WEEKLY = "{m} {h} * * {d}"

# The default time for automatic backups to run is at 04:45.
# This time is chosen to be compatible with the time of the recorder's
# nightly job which runs at 04:12.
DEFAULT_BACKUP_TIME = dt.time(4, 45)

# Randomize the start time of the backup by up to 60 minutes to avoid
# all backups running at the same time.
BACKUP_START_TIME_JITTER = 60 * 60


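A small sketch of how these placeholder patterns expand: once formatted, they are ordinary five-field cron expressions that CronSim can iterate. The example times and days are illustrative only:

    from datetime import datetime
    from cronsim import CronSim

    # DEFAULT_BACKUP_TIME (04:45) filled into the patterns above.
    daily = "{m} {h} * * *".format(m=45, h=4)                  # "45 4 * * *"
    custom = "{m} {h} * * {d}".format(m=45, h=4, d="mon,fri")  # "45 4 * * mon,fri"

    now = datetime(2025, 1, 1, 12, 0)  # a Wednesday
    print(next(CronSim(daily, now)))   # 2025-01-02 04:45:00
    print(next(CronSim(custom, now)))  # 2025-01-03 04:45:00 (the next Friday)
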
class StoredBackupConfig(TypedDict):
@ -69,6 +77,12 @@ class BackupConfigData:
else:
last_completed = None

if time_str := data["schedule"]["time"]:
time = dt_util.parse_time(time_str)
else:
time = None
days = [Day(day) for day in data["schedule"]["days"]]

return cls(
create_backup=CreateBackupConfig(
agent_ids=data["create_backup"]["agent_ids"],
@ -85,7 +99,12 @@
copies=retention["copies"],
days=retention["days"],
),
schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])),
schedule=BackupSchedule(
days=days,
recurrence=ScheduleRecurrence(data["schedule"]["recurrence"]),
state=ScheduleState(data["schedule"].get("state", ScheduleState.NEVER)),
time=time,
),
)

def to_dict(self) -> StoredBackupConfig:
@ -124,6 +143,7 @@ class BackupConfig:
def load(self, stored_config: StoredBackupConfig) -> None:
"""Load config."""
self.data = BackupConfigData.from_dict(stored_config)
self.data.retention.apply(self._manager)
self.data.schedule.apply(self._manager)

async def update(
@ -131,7 +151,7 @@ class BackupConfig:
*,
create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
retention: RetentionParametersDict | UndefinedType = UNDEFINED,
schedule: ScheduleState | UndefinedType = UNDEFINED,
schedule: ScheduleParametersDict | UndefinedType = UNDEFINED,
) -> None:
"""Update config."""
if create_backup is not UNDEFINED:
@ -142,7 +162,7 @@ class BackupConfig:
self.data.retention = new_retention
self.data.retention.apply(self._manager)
if schedule is not UNDEFINED:
new_schedule = BackupSchedule(state=schedule)
new_schedule = BackupSchedule(**schedule)
if new_schedule.to_dict() != self.data.schedule.to_dict():
self.data.schedule = new_schedule
self.data.schedule.apply(self._manager)
@ -160,8 +180,13 @@ class RetentionConfig:
def apply(self, manager: BackupManager) -> None:
"""Apply backup retention configuration."""
if self.days is not None:
LOGGER.debug(
"Scheduling next automatic delete of backups older than %s in 1 day",
self.days,
)
self._schedule_next(manager)
else:
LOGGER.debug("Unscheduling next automatic delete")
self._unschedule_next(manager)

def to_dict(self) -> StoredRetentionConfig:
@ -231,11 +256,46 @@ class RetentionParametersDict(TypedDict, total=False):
class StoredBackupSchedule(TypedDict):
"""Represent the stored backup schedule configuration."""

days: list[Day]
recurrence: ScheduleRecurrence
state: ScheduleState
time: str | None


class ScheduleParametersDict(TypedDict, total=False):
"""Represent parameters for backup schedule."""

days: list[Day]
recurrence: ScheduleRecurrence
state: ScheduleState
time: dt.time | None


class Day(StrEnum):
"""Represent the day(s) in a custom schedule recurrence."""

MONDAY = "mon"
TUESDAY = "tue"
WEDNESDAY = "wed"
THURSDAY = "thu"
FRIDAY = "fri"
SATURDAY = "sat"
SUNDAY = "sun"


class ScheduleRecurrence(StrEnum):
"""Represent the schedule recurrence."""

NEVER = "never"
DAILY = "daily"
CUSTOM_DAYS = "custom_days"


class ScheduleState(StrEnum):
"""Represent the schedule state."""
"""Represent the schedule recurrence.

This is deprecated and can be removed in HA Core 2025.8.
"""

NEVER = "never"
DAILY = "daily"
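
As a usage sketch, a ScheduleParametersDict built from the types above, roughly as BackupConfig.update() would receive it (the concrete values are invented for illustration):

    import datetime as dt

    # Hypothetical payload; Day and ScheduleRecurrence are defined above.
    schedule_params: ScheduleParametersDict = {
        "days": [Day.MONDAY, Day.FRIDAY],
        "recurrence": ScheduleRecurrence.CUSTOM_DAYS,
        "time": dt.time(3, 30),  # overrides the 04:45 default
    }
    # Applied as: await manager.config.update(schedule=schedule_params)
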
@ -252,8 +312,15 @@ class ScheduleState(StrEnum):
class BackupSchedule:
"""Represent the backup schedule."""

days: list[Day] = field(default_factory=list)
recurrence: ScheduleRecurrence = ScheduleRecurrence.NEVER
# Although no longer used, state is kept for backwards compatibility.
# It can be removed in HA Core 2025.8.
state: ScheduleState = ScheduleState.NEVER
time: dt.time | None = None
cron_event: CronSim | None = field(init=False, default=None)
next_automatic_backup: datetime | None = field(init=False, default=None)
next_automatic_backup_additional = False

@callback
def apply(
@ -262,17 +329,27 @@
) -> None:
"""Apply a new schedule.

There are only three possible state types: never, daily, or weekly.
There are only three possible recurrence types: never, daily, or custom_days
"""
if self.state is ScheduleState.NEVER:
if self.recurrence is ScheduleRecurrence.NEVER or (
self.recurrence is ScheduleRecurrence.CUSTOM_DAYS and not self.days
):
self._unschedule_next(manager)
return

if self.state is ScheduleState.DAILY:
self._schedule_next(CRON_PATTERN_DAILY, manager)
else:
time = self.time if self.time is not None else DEFAULT_BACKUP_TIME
if self.recurrence is ScheduleRecurrence.DAILY:
self._schedule_next(
CRON_PATTERN_WEEKLY.format(self.state.value),
CRON_PATTERN_DAILY.format(m=time.minute, h=time.hour),
manager,
)
else: # ScheduleRecurrence.CUSTOM_DAYS
self._schedule_next(
CRON_PATTERN_WEEKLY.format(
m=time.minute,
h=time.hour,
d=",".join(day.value for day in self.days),
),
manager,
)

@ -293,12 +370,23 @@ class BackupSchedule:
if next_time < now:
# schedule a backup at next daily time once
# if we missed the last scheduled backup
cron_event = CronSim(CRON_PATTERN_DAILY, now)
time = self.time if self.time is not None else DEFAULT_BACKUP_TIME
cron_event = CronSim(
CRON_PATTERN_DAILY.format(m=time.minute, h=time.hour), now
)
next_time = next(cron_event)
# reseed the cron event attribute
# add a day to the next time to avoid scheduling at the same time again
self.cron_event = CronSim(cron_pattern, now + timedelta(days=1))

# Compare the computed next time with the next time from the cron pattern
# to determine if an additional backup has been scheduled
cron_event_configured = CronSim(cron_pattern, now)
next_configured_time = next(cron_event_configured)
self.next_automatic_backup_additional = next_time < next_configured_time
else:
self.next_automatic_backup_additional = False

async def _create_backup(now: datetime) -> None:
"""Create backup."""
manager.remove_next_backup_event = None
@ -318,22 +406,34 @@ class BackupSchedule:
password=config_data.create_backup.password,
with_automatic_settings=True,
)
except BackupManagerError as err:
LOGGER.error("Error creating backup: %s", err)
except Exception: # noqa: BLE001
# another more specific exception will be added
# and handled in the future
LOGGER.exception("Unexpected error creating automatic backup")

if self.time is None:
# randomize the start time of the backup by up to 60 minutes if the time is
# not set to avoid all backups running at the same time
next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
LOGGER.debug("Scheduling next automatic backup at %s", next_time)
self.next_automatic_backup = next_time
manager.remove_next_backup_event = async_track_point_in_time(
manager.hass, _create_backup, next_time
)

def to_dict(self) -> StoredBackupSchedule:
"""Convert backup schedule to a dict."""
return StoredBackupSchedule(state=self.state)
return StoredBackupSchedule(
days=self.days,
recurrence=self.recurrence,
state=self.state,
time=self.time.isoformat() if self.time else None,
)

@callback
def _unschedule_next(self, manager: BackupManager) -> None:
"""Unschedule the next backup."""
self.next_automatic_backup = None
if (remove_next_event := manager.remove_next_backup_event) is not None:
remove_next_event()
manager.remove_next_backup_event = None

@ -4,18 +4,23 @@ from __future__ import annotations

import asyncio
from http import HTTPStatus
from typing import cast
import threading
from typing import IO, cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response, StreamResponse
from multidict import istr

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import slugify

from . import util
from .agent import BackupAgent
from .const import DATA_MANAGER
from .manager import BackupManager


@callback
@ -43,8 +48,13 @@ class DownloadBackupView(HomeAssistantView):
agent_id = request.query.getone("agent_id")
except KeyError:
return Response(status=HTTPStatus.BAD_REQUEST)
try:
password = request.query.getone("password")
except KeyError:
password = None

manager = request.app[KEY_HASS].data[DATA_MANAGER]
hass = request.app[KEY_HASS]
manager = hass.data[DATA_MANAGER]
if agent_id not in manager.backup_agents:
return Response(status=HTTPStatus.BAD_REQUEST)
agent = manager.backup_agents[agent_id]
@ -58,6 +68,24 @@ class DownloadBackupView(HomeAssistantView):
headers = {
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
}

if not password:
return await self._send_backup_no_password(
request, headers, backup_id, agent_id, agent, manager
)
return await self._send_backup_with_password(
hass, request, headers, backup_id, agent_id, password, agent, manager
)

async def _send_backup_no_password(
self,
request: Request,
headers: dict[istr, str],
backup_id: str,
agent_id: str,
agent: BackupAgent,
manager: BackupManager,
) -> StreamResponse | FileResponse | Response:
if agent_id in manager.local_backup_agents:
local_agent = manager.local_backup_agents[agent_id]
path = local_agent.get_backup_path(backup_id)
@ -70,6 +98,50 @@ class DownloadBackupView(HomeAssistantView):
await response.write(chunk)
return response

async def _send_backup_with_password(
self,
hass: HomeAssistant,
request: Request,
headers: dict[istr, str],
backup_id: str,
agent_id: str,
password: str,
agent: BackupAgent,
manager: BackupManager,
) -> StreamResponse | FileResponse | Response:
reader: IO[bytes]
if agent_id in manager.local_backup_agents:
local_agent = manager.local_backup_agents[agent_id]
path = local_agent.get_backup_path(backup_id)
try:
reader = await hass.async_add_executor_job(open, path.as_posix(), "rb")
except FileNotFoundError:
return Response(status=HTTPStatus.NOT_FOUND)
else:
stream = await agent.async_download_backup(backup_id)
reader = cast(IO[bytes], util.AsyncIteratorReader(hass, stream))

worker_done_event = asyncio.Event()

def on_done() -> None:
"""Called by the worker thread when it's done."""
hass.loop.call_soon_threadsafe(worker_done_event.set)

stream = util.AsyncIteratorWriter(hass)
worker = threading.Thread(
target=util.decrypt_backup, args=[reader, stream, password, on_done]
)
try:
worker.start()
response = StreamResponse(status=HTTPStatus.OK, headers=headers)
await response.prepare(request)
async for chunk in stream:
await response.write(chunk)
return response
finally:
reader.close()
await worker_done_event.wait()


class UploadBackupView(HomeAssistantView):
"""Generate backup view."""

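The password branch above bridges the async event loop and a blocking decryption worker: AsyncIteratorReader gives the worker a file-like view of the downloaded stream, decrypt_backup runs in a thread, and AsyncIteratorWriter feeds decrypted chunks back for the HTTP response. A condensed sketch of the same pipeline with the HTTP specifics stripped away (the util names are from backup/util.py in this diff; the wrapper function is hypothetical):

    import asyncio
    import threading

    async def iter_decrypted(hass, encrypted, password):
        """Sketch: decrypt an async byte stream on the fly."""
        reader = util.AsyncIteratorReader(hass, encrypted)  # blocking reads from async input
        writer = util.AsyncIteratorWriter(hass)             # async iteration over blocking writes
        done = asyncio.Event()

        threading.Thread(
            target=util.decrypt_backup,
            args=[reader, writer, password, lambda: hass.loop.call_soon_threadsafe(done.set)],
        ).start()
        try:
            async for chunk in writer:  # decrypted chunks as they are produced
                yield chunk
        finally:
            reader.close()
            await done.wait()
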
@ -2,6 +2,9 @@
"services": {
"create": {
"service": "mdi:cloud-upload"
},
"create_automatic": {
"service": "mdi:cloud-upload"
}
}
}

@ -10,11 +10,11 @@ from enum import StrEnum
import hashlib
import io
import json
from pathlib import Path
from pathlib import Path, PurePath
import shutil
import tarfile
import time
from typing import TYPE_CHECKING, Any, Protocol, TypedDict
from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast

import aiohttp
from securetar import SecureTarFile, atomic_contents_add
@ -31,6 +31,7 @@ from homeassistant.helpers import (
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util

from . import util as backup_util
from .agent import (
BackupAgent,
BackupAgentError,
@ -46,13 +47,15 @@ from .const import (
EXCLUDE_FROM_BACKUP,
LOGGER,
)
from .models import AgentBackup, Folder
from .models import AgentBackup, BackupManagerError, Folder
from .store import BackupStore
from .util import make_backup_dir, read_backup, validate_password


class IncorrectPasswordError(HomeAssistantError):
"""Raised when the password is incorrect."""
from .util import (
AsyncIteratorReader,
make_backup_dir,
read_backup,
validate_password,
validate_password_stream,
)


@dataclass(frozen=True, kw_only=True, slots=True)
@ -144,6 +147,7 @@ class RestoreBackupState(StrEnum):
"""Restore backup state enum."""

COMPLETED = "completed"
CORE_RESTART = "core_restart"
FAILED = "failed"
IN_PROGRESS = "in_progress"

@ -214,7 +218,7 @@ class BackupReaderWriter(abc.ABC):
include_database: bool,
include_folders: list[Folder] | None,
include_homeassistant: bool,
on_progress: Callable[[ManagerStateEvent], None],
on_progress: Callable[[CreateBackupEvent], None],
password: str | None,
) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
"""Create a backup."""
@ -235,6 +239,7 @@ class BackupReaderWriter(abc.ABC):
backup_id: str,
*,
agent_id: str,
on_progress: Callable[[RestoreBackupEvent], None],
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
password: str | None,
restore_addons: list[str] | None,
@ -245,6 +250,22 @@ class BackupReaderWriter(abc.ABC):
"""Restore a backup."""


class BackupReaderWriterError(HomeAssistantError):
"""Backup reader/writer error."""


class IncorrectPasswordError(BackupReaderWriterError):
"""Raised when the password is incorrect."""

_message = "The password provided is incorrect."


class DecryptOnDowloadNotSupported(BackupManagerError):
"""Raised when on-the-fly decryption is not supported."""

_message = "On-the-fly decryption is not supported for this backup."


class BackupManager:
"""Define the format that backup managers can have."""

@ -373,7 +394,9 @@ class BackupManager:
)
for result in pre_backup_results:
if isinstance(result, Exception):
raise result
raise BackupManagerError(
f"Error during pre-backup: {result}"
) from result

async def async_post_backup_actions(self) -> None:
"""Perform post backup actions."""
@ -386,7 +409,9 @@ class BackupManager:
)
for result in post_backup_results:
if isinstance(result, Exception):
raise result
raise BackupManagerError(
f"Error during post-backup: {result}"
) from result

async def load_platforms(self) -> None:
"""Load backup platforms."""
@ -422,11 +447,25 @@ class BackupManager:
return_exceptions=True,
)
for idx, result in enumerate(sync_backup_results):
agent_id = agent_ids[idx]
if isinstance(result, BackupReaderWriterError):
# writer errors will affect all agents
# no point in continuing
raise BackupManagerError(str(result)) from result
if isinstance(result, BackupAgentError):
agent_errors[agent_id] = result
LOGGER.error("Upload failed for %s: %s", agent_id, result)
continue
if isinstance(result, Exception):
agent_errors[agent_ids[idx]] = result
LOGGER.exception(
"Error during backup upload - %s", result, exc_info=result
# trap bugs from agents
agent_errors[agent_id] = result
LOGGER.error(
"Unexpected error for %s: %s", agent_id, result, exc_info=result
)
continue
if isinstance(result, BaseException):
raise result

return agent_errors

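The triage above sorts the results of asyncio.gather(..., return_exceptions=True) into distinct buckets; reduced to its skeleton (exception classes are the ones introduced in this diff), the pattern is:

    # Skeleton of the error triage used above.
    for agent_id, result in zip(agent_ids, sync_backup_results):
        if isinstance(result, BackupReaderWriterError):
            # Producing the backup failed, so every agent would fail: abort.
            raise BackupManagerError(str(result)) from result
        if isinstance(result, BackupAgentError):
            agent_errors[agent_id] = result  # expected per-agent failure
            continue
        if isinstance(result, Exception):
            agent_errors[agent_id] = result  # a bug in an agent; logged loudly
            continue
        if isinstance(result, BaseException):
            raise result  # e.g. CancelledError must never be swallowed
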
async def async_get_backups(
@ -449,7 +488,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error
for agent_backup in result:
if (backup_id := agent_backup.backup_id) not in backups:
if known_backup := self.known_backups.get(backup_id):
@ -499,7 +538,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error
if not result:
continue
if backup is None:
@ -563,7 +602,7 @@ class BackupManager:
agent_errors[agent_ids[idx]] = result
continue
if isinstance(result, BaseException):
raise result
raise result # unexpected error

if not agent_errors:
self.known_backups.remove(backup_id)
@ -578,7 +617,7 @@ class BackupManager:
) -> None:
"""Receive and store a backup file from upload."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")
self.async_on_backup_event(
ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
)
@ -652,6 +691,7 @@ class BackupManager:
include_homeassistant=include_homeassistant,
name=name,
password=password,
raise_task_error=True,
with_automatic_settings=with_automatic_settings,
)
assert self._backup_finish_task
@ -669,11 +709,12 @@ class BackupManager:
include_homeassistant: bool,
name: str | None,
password: str | None,
raise_task_error: bool = False,
with_automatic_settings: bool = False,
) -> NewBackup:
"""Initiate generating a backup."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")

if with_automatic_settings:
self.config.data.last_attempted_automatic_backup = dt_util.now()
@ -692,6 +733,7 @@ class BackupManager:
include_homeassistant=include_homeassistant,
name=name,
password=password,
raise_task_error=raise_task_error,
with_automatic_settings=with_automatic_settings,
)
except Exception:
@ -714,57 +756,81 @@ class BackupManager:
include_homeassistant: bool,
name: str | None,
password: str | None,
raise_task_error: bool,
with_automatic_settings: bool,
) -> NewBackup:
"""Initiate generating a backup."""
if not agent_ids:
raise HomeAssistantError("At least one agent must be selected")
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
raise HomeAssistantError("Invalid agent selected")
raise BackupManagerError("At least one agent must be selected")
if invalid_agents := [
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
]:
raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
if include_all_addons and include_addons:
raise HomeAssistantError(
raise BackupManagerError(
"Cannot include all addons and specify specific addons"
)

backup_name = (
name
or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
or f"{'Automatic' if with_automatic_settings else 'Custom'} backup {HAVERSION}"
)
new_backup, self._backup_task = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
backup_name=backup_name,
extra_metadata={
"instance_id": await instance_id.async_get(self.hass),
"with_automatic_settings": with_automatic_settings,
},
include_addons=include_addons,
include_all_addons=include_all_addons,
include_database=include_database,
include_folders=include_folders,
include_homeassistant=include_homeassistant,
on_progress=self.async_on_backup_event,
password=password,
)
self._backup_finish_task = self.hass.async_create_task(

try:
(
new_backup,
self._backup_task,
) = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
backup_name=backup_name,
extra_metadata={
"instance_id": await instance_id.async_get(self.hass),
"with_automatic_settings": with_automatic_settings,
},
include_addons=include_addons,
include_all_addons=include_all_addons,
include_database=include_database,
include_folders=include_folders,
include_homeassistant=include_homeassistant,
on_progress=self.async_on_backup_event,
password=password,
)
except BackupReaderWriterError as err:
raise BackupManagerError(str(err)) from err

backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
self._async_finish_backup(agent_ids, with_automatic_settings),
name="backup_manager_finish_backup",
)
if not raise_task_error:

def log_finish_task_error(task: asyncio.Task[None]) -> None:
if task.done() and not task.cancelled() and (err := task.exception()):
if isinstance(err, BackupManagerError):
LOGGER.error("Error creating backup: %s", err)
else:
LOGGER.error("Unexpected error: %s", err, exc_info=err)

backup_finish_task.add_done_callback(log_finish_task_error)

return new_backup

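When raise_task_error is false the finish task runs fire-and-forget, so its exception would otherwise vanish; the done callback above logs it instead. The pattern in isolation, as a runnable sketch:

    import asyncio

    def log_task_error(task: asyncio.Task) -> None:
        # Mirrors log_finish_task_error above; LOGGER.error in the real code.
        if task.done() and not task.cancelled() and (err := task.exception()):
            print(f"Background task failed: {err!r}")

    async def main() -> None:
        async def boom() -> None:
            raise RuntimeError("upload failed")

        task = asyncio.create_task(boom())
        task.add_done_callback(log_task_error)
        await asyncio.sleep(0.01)  # let the task finish and the callback fire

    asyncio.run(main())
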
async def _async_finish_backup(
self, agent_ids: list[str], with_automatic_settings: bool
) -> None:
"""Finish a backup."""
if TYPE_CHECKING:
assert self._backup_task is not None
backup_success = False
try:
written_backup = await self._backup_task
except Exception as err: # noqa: BLE001
LOGGER.debug("Generating backup failed", exc_info=err)
self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
)
except Exception as err:
if with_automatic_settings:
self._update_issue_backup_failed()

if isinstance(err, BackupReaderWriterError):
raise BackupManagerError(str(err)) from err
raise # unexpected error
else:
LOGGER.debug(
"Generated new backup with backup_id %s, uploading to agents %s",
@ -777,28 +843,40 @@ class BackupManager:
state=CreateBackupState.IN_PROGRESS,
)
)
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
await written_backup.release_stream()
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
self._update_issue_after_agent_upload(agent_errors)
self.known_backups.add(written_backup.backup, agent_errors)

try:
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
finally:
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors)
if not agent_errors:
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
backup_success = True

if with_automatic_settings:
self._update_issue_after_agent_upload(agent_errors)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)

self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
)
finally:
self._backup_task = None
self._backup_finish_task = None
self.async_on_backup_event(
CreateBackupEvent(
stage=None,
state=CreateBackupState.COMPLETED
if backup_success
else CreateBackupState.FAILED,
)
)
self.async_on_backup_event(IdleEvent())

async def async_restore_backup(
@ -814,7 +892,7 @@ class BackupManager:
) -> None:
"""Initiate restoring a backup."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")

self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@ -829,6 +907,9 @@ class BackupManager:
restore_folders=restore_folders,
restore_homeassistant=restore_homeassistant,
)
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
)
except Exception:
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@ -851,7 +932,7 @@ class BackupManager:
"""Initiate restoring a backup."""
agent = self.backup_agents[agent_id]
if not await agent.async_get_backup(backup_id):
raise HomeAssistantError(
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
)

@ -862,6 +943,7 @@ class BackupManager:
backup_id=backup_id,
open_stream=open_backup,
agent_id=agent_id,
on_progress=self.async_on_backup_event,
password=password,
restore_addons=restore_addons,
restore_database=restore_database,
@ -926,6 +1008,41 @@ class BackupManager:
translation_placeholders={"failed_agents": ", ".join(agent_errors)},
)

|
||||
self,
|
||||
backup_id: str,
|
||||
*,
|
||||
agent_id: str,
|
||||
password: str | None,
|
||||
) -> None:
|
||||
"""Check if we are able to decrypt the backup on download."""
|
||||
try:
|
||||
agent = self.backup_agents[agent_id]
|
||||
except KeyError as err:
|
||||
raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err
|
||||
if not await agent.async_get_backup(backup_id):
|
||||
raise BackupManagerError(
|
||||
f"Backup {backup_id} not found in agent {agent_id}"
|
||||
)
|
||||
reader: IO[bytes]
|
||||
if agent_id in self.local_backup_agents:
|
||||
local_agent = self.local_backup_agents[agent_id]
|
||||
path = local_agent.get_backup_path(backup_id)
|
||||
reader = await self.hass.async_add_executor_job(open, path.as_posix(), "rb")
|
||||
else:
|
||||
backup_stream = await agent.async_download_backup(backup_id)
|
||||
reader = cast(IO[bytes], AsyncIteratorReader(self.hass, backup_stream))
|
||||
try:
|
||||
validate_password_stream(reader, password)
|
||||
except backup_util.IncorrectPassword as err:
|
||||
raise IncorrectPasswordError from err
|
||||
except backup_util.UnsupportedSecureTarVersion as err:
|
||||
raise DecryptOnDowloadNotSupported from err
|
||||
except backup_util.DecryptError as err:
|
||||
raise BackupManagerError(str(err)) from err
|
||||
finally:
|
||||
reader.close()
|
||||
|
||||
|
||||
class KnownBackups:
"""Track known backups."""
@ -1016,7 +1133,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
include_database: bool,
include_folders: list[Folder] | None,
include_homeassistant: bool,
on_progress: Callable[[ManagerStateEvent], None],
on_progress: Callable[[CreateBackupEvent], None],
password: str | None,
) -> tuple[NewBackup, asyncio.Task[WrittenBackup]]:
"""Initiate generating a backup."""
@ -1024,11 +1141,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_id = _generate_backup_id(date_str, backup_name)

if include_addons or include_all_addons or include_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported by core backup"
)
if not include_homeassistant:
raise HomeAssistantError("Home Assistant must be included in backup")
raise BackupReaderWriterError("Home Assistant must be included in backup")

backup_task = self._hass.async_create_task(
self._async_create_backup(
@ -1056,7 +1173,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
date_str: str,
extra_metadata: dict[str, bool | str],
include_database: bool,
on_progress: Callable[[ManagerStateEvent], None],
on_progress: Callable[[CreateBackupEvent], None],
password: str | None,
) -> WrittenBackup:
"""Generate a backup."""
@ -1099,6 +1216,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
password,
local_agent_tar_file_path,
)
except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
# BackupManagerError from async_pre_backup_actions
# OSError from file operations
# TarError from tarfile
# ValueError from json_bytes
raise BackupReaderWriterError(str(err)) from err
else:
backup = AgentBackup(
addons=[],
backup_id=backup_id,
@ -1116,12 +1240,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async_add_executor_job = self._hass.async_add_executor_job

async def send_backup() -> AsyncIterator[bytes]:
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err

async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@ -1129,14 +1256,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async def remove_backup() -> None:
if local_agent_tar_file_path:
return
await async_add_executor_job(tar_file_path.unlink, True)
try:
await async_add_executor_job(tar_file_path.unlink, True)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err

return WrittenBackup(
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
finally:
# Inform integrations the backup is done
await manager.async_post_backup_actions()
try:
await manager.async_post_backup_actions()
except BackupManagerError as err:
raise BackupReaderWriterError(str(err)) from err

def _mkdir_and_generate_backup_contents(
self,
@ -1154,6 +1287,17 @@ class CoreBackupReaderWriter(BackupReaderWriter):
if not database_included:
excludes = excludes + EXCLUDE_DATABASE_FROM_BACKUP

def is_excluded_by_filter(path: PurePath) -> bool:
"""Return True if a path matches one of the exclude patterns."""

for exclude in excludes:
if not path.match(exclude):
continue
LOGGER.debug("Ignoring %s because of %s", path, exclude)
return True

return False

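PurePath.match() matches glob patterns from the right, which is what lets the filter above work on archive-relative paths. A small illustration (the patterns are invented for the example):

    from pathlib import PurePath

    excludes = ["*.log", "tmp/*"]  # illustrative patterns only

    def is_excluded(path: PurePath) -> bool:
        # Same shape as is_excluded_by_filter above, minus the debug logging.
        return any(path.match(pattern) for pattern in excludes)

    print(is_excluded(PurePath("data/home-assistant.log")))  # True, via "*.log"
    print(is_excluded(PurePath("data/configuration.yaml")))  # False
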
outer_secure_tarfile = SecureTarFile(
tar_file_path, "w", gzip=False, bufsize=BUF_SIZE
)
@ -1172,7 +1316,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
atomic_contents_add(
tar_file=core_tar,
origin_path=Path(self._hass.config.path()),
excludes=excludes,
file_filter=is_excluded_by_filter,
arcname="data",
)
return (tar_file_path, tar_file_path.stat().st_size)
@ -1206,6 +1350,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
if self._local_agent_id in agent_ids:
local_agent = manager.local_backup_agents[self._local_agent_id]
tar_file_path = local_agent.get_backup_path(backup.backup_id)
await async_add_executor_job(make_backup_dir, tar_file_path.parent)
await async_add_executor_job(shutil.move, temp_file, tar_file_path)
else:
tar_file_path = temp_file
@ -1236,6 +1381,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
*,
agent_id: str,
on_progress: Callable[[RestoreBackupEvent], None],
password: str | None,
restore_addons: list[str] | None,
restore_database: bool,
@ -1249,11 +1395,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""

if restore_addons or restore_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported in core restore"
)
if not restore_homeassistant and not restore_database:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Home Assistant or database must be included in restore"
)

@ -1280,7 +1426,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
validate_password, path, password
)
if not password_valid:
raise IncorrectPasswordError("The password provided is incorrect.")
raise IncorrectPasswordError

def _write_restore_file() -> None:
"""Write the restore file."""
@ -1298,7 +1444,10 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)

await self._hass.async_add_executor_job(_write_restore_file)
await self._hass.services.async_call("homeassistant", "restart", {})
on_progress(
RestoreBackupEvent(stage=None, state=RestoreBackupState.CORE_RESTART)
)
await self._hass.services.async_call("homeassistant", "restart", blocking=True)


def _generate_backup_id(date: str, name: str) -> str:

@ -8,5 +8,5 @@
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["cronsim==2.6", "securetar==2024.11.0"]
"requirements": ["cronsim==2.6", "securetar==2025.1.3"]
}

@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self

from homeassistant.exceptions import HomeAssistantError


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
@ -67,3 +69,7 @@ class AgentBackup:
protected=data["protected"],
size=data["size"],
)


class BackupManagerError(HomeAssistantError):
"""Backup manager error."""

@ -1 +1,2 @@
create:
create_automatic:

@ -2,7 +2,7 @@

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict
from typing import TYPE_CHECKING, Any, TypedDict

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store
@ -16,6 +16,7 @@ if TYPE_CHECKING:
STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 2


class StoredBackupData(TypedDict):
@ -25,14 +26,52 @@ class StoredBackupData(TypedDict):
config: StoredBackupConfig


class _BackupStore(Store[StoredBackupData]):
"""Class to help storing backup data."""

def __init__(self, hass: HomeAssistant) -> None:
"""Initialize storage class."""
super().__init__(
hass,
STORAGE_VERSION,
STORAGE_KEY,
minor_version=STORAGE_VERSION_MINOR,
)

async def _async_migrate_func(
self,
old_major_version: int,
old_minor_version: int,
old_data: dict[str, Any],
) -> dict[str, Any]:
"""Migrate to the new version."""
data = old_data
if old_major_version == 1:
if old_minor_version < 2:
# Version 1.2 adds configurable backup time and custom days
data["config"]["schedule"]["time"] = None
if (state := data["config"]["schedule"]["state"]) in ("daily", "never"):
data["config"]["schedule"]["days"] = []
data["config"]["schedule"]["recurrence"] = state
else:
data["config"]["schedule"]["days"] = [state]
data["config"]["schedule"]["recurrence"] = "custom_days"

# Note: We allow reading data with major version 2.
# Reject if major version is higher than 2.
if old_major_version > 2:
raise NotImplementedError
return data

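Concretely, the 1.1 to 1.2 migration above rewrites a stored schedule like this (surrounding keys omitted; a weekday state is folded into the new custom_days recurrence):

    # Before (storage version 1.1): the schedule was a single state string.
    old = {"config": {"schedule": {"state": "mon"}}}

    # After _async_migrate_func (version 1.2): time and days are explicit.
    new = {
        "config": {
            "schedule": {
                "state": "mon",  # kept for backwards compatibility
                "days": ["mon"],
                "recurrence": "custom_days",
                "time": None,
            }
        }
    }
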
class BackupStore:
"""Store backup config."""

def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
"""Initialize the backup manager."""
"""Initialize the backup store."""
self._hass = hass
self._manager = manager
self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY)
self._store = _BackupStore(hass)

async def load(self) -> StoredBackupData | None:
"""Load the store."""

@ -5,14 +5,18 @@
"description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to agents",
"description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {
"create": {
"name": "Create backup",
"description": "Creates a new backup."
},
"create_automatic": {
"name": "Create automatic backup",
"description": "Creates a new backup with automatic backup settings."
}
}
}

@ -3,22 +3,51 @@
from __future__ import annotations

import asyncio
from pathlib import Path
from collections.abc import AsyncIterator, Callable
import copy
from io import BytesIO
import json
from pathlib import Path, PurePath
from queue import SimpleQueue
import tarfile
from typing import cast
from typing import IO, Self, cast

import aiohttp
from securetar import SecureTarFile
from securetar import SecureTarError, SecureTarFile, SecureTarReadError

from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
from .models import AddonInfo, AgentBackup, Folder


class DecryptError(HomeAssistantError):
"""Error during decryption."""

_message = "Unexpected error during decryption."


class UnsupportedSecureTarVersion(DecryptError):
"""Unsupported securetar version."""

_message = "Unsupported securetar version."


class IncorrectPassword(DecryptError):
"""Invalid password or corrupted backup."""

_message = "Invalid password or corrupted backup."


class BackupEmpty(DecryptError):
"""No tar files found in the backup."""

_message = "No tar files found in the backup."


def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True)
@ -106,6 +135,159 @@ def validate_password(path: Path, password: str | None) -> bool:
return False


class AsyncIteratorReader:
"""Wrap an AsyncIterator."""

def __init__(self, hass: HomeAssistant, stream: AsyncIterator[bytes]) -> None:
"""Initialize the wrapper."""
self._hass = hass
self._stream = stream
self._buffer: bytes | None = None
self._pos: int = 0

async def _next(self) -> bytes | None:
"""Get the next chunk from the iterator."""
return await anext(self._stream, None)

def read(self, n: int = -1, /) -> bytes:
"""Read data from the iterator."""
result = bytearray()
while n < 0 or len(result) < n:
if not self._buffer:
self._buffer = asyncio.run_coroutine_threadsafe(
self._next(), self._hass.loop
).result()
self._pos = 0
if not self._buffer:
# The stream is exhausted
break
chunk = self._buffer[self._pos : self._pos + n]
result.extend(chunk)
n -= len(chunk)
self._pos += len(chunk)
if self._pos == len(self._buffer):
self._buffer = None
return bytes(result)

def close(self) -> None:
"""Close the iterator."""


class AsyncIteratorWriter:
"""Wrap an AsyncIterator."""

def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the wrapper."""
self._hass = hass
self._queue: asyncio.Queue[bytes | None] = asyncio.Queue(maxsize=1)

def __aiter__(self) -> Self:
"""Return the iterator."""
return self

async def __anext__(self) -> bytes:
"""Get the next chunk from the iterator."""
if data := await self._queue.get():
return data
raise StopAsyncIteration

def write(self, s: bytes, /) -> int:
"""Write data to the iterator."""
asyncio.run_coroutine_threadsafe(self._queue.put(s), self._hass.loop).result()
return len(s)


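AsyncIteratorReader exists so blocking consumers such as tarfile can read a stream that only exists as an async iterator; its read() parks on run_coroutine_threadsafe(), so it must run in a worker thread, never on the event loop. A usage sketch (the helper function is hypothetical):

    import tarfile

    def list_backup_members(hass, stream) -> list[str]:
        # Run via hass.async_add_executor_job, never on the event loop,
        # because AsyncIteratorReader.read() blocks while awaiting the stream.
        reader = AsyncIteratorReader(hass, stream)
        with tarfile.open(fileobj=reader, mode="r|") as tar:
            return [member.name for member in tar]
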
def validate_password_stream(
input_stream: IO[bytes],
password: str | None,
) -> None:
"""Validate the password of an encrypted backup stream."""
with (
tarfile.open(fileobj=input_stream, mode="r|", bufsize=BUF_SIZE) as input_tar,
):
for obj in input_tar:
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
continue
istf = SecureTarFile(
None, # Not used
gzip=False,
key=password_to_key(password) if password is not None else None,
mode="r",
fileobj=input_tar.extractfile(obj),
)
with istf.decrypt(obj) as decrypted:
if istf.securetar_header.plaintext_size is None:
raise UnsupportedSecureTarVersion
try:
decrypted.read(1) # Read a single byte to trigger the decryption
except SecureTarReadError as err:
raise IncorrectPassword from err
return
raise BackupEmpty


def decrypt_backup(
input_stream: IO[bytes],
output_stream: IO[bytes],
password: str | None,
on_done: Callable[[], None],
) -> None:
"""Decrypt a backup."""
try:
with (
tarfile.open(
fileobj=input_stream, mode="r|", bufsize=BUF_SIZE
) as input_tar,
tarfile.open(
fileobj=output_stream, mode="w|", bufsize=BUF_SIZE
) as output_tar,
):
_decrypt_backup(input_tar, output_tar, password)
except (DecryptError, SecureTarError, tarfile.TarError) as err:
LOGGER.warning("Error decrypting backup: %s", err)
finally:
output_stream.write(b"") # Write an empty chunk to signal the end of the stream
on_done()


def _decrypt_backup(
input_tar: tarfile.TarFile,
output_tar: tarfile.TarFile,
password: str | None,
) -> None:
"""Decrypt a backup."""
for obj in input_tar:
# We compare with PurePath to avoid issues with different path separators,
# for example when backup.json is added as "./backup.json"
if PurePath(obj.name) == PurePath("backup.json"):
# Rewrite the backup.json file to indicate that the backup is decrypted
if not (reader := input_tar.extractfile(obj)):
raise DecryptError
metadata = json_loads_object(reader.read())
metadata["protected"] = False
updated_metadata_b = json.dumps(metadata).encode()
metadata_obj = copy.deepcopy(obj)
metadata_obj.size = len(updated_metadata_b)
output_tar.addfile(metadata_obj, BytesIO(updated_metadata_b))
continue
if not obj.name.endswith((".tar", ".tgz", ".tar.gz")):
output_tar.addfile(obj, input_tar.extractfile(obj))
continue
istf = SecureTarFile(
None, # Not used
gzip=False,
key=password_to_key(password) if password is not None else None,
mode="r",
fileobj=input_tar.extractfile(obj),
)
with istf.decrypt(obj) as decrypted:
if (plaintext_size := istf.securetar_header.plaintext_size) is None:
raise UnsupportedSecureTarVersion
decrypted_obj = copy.deepcopy(obj)
decrypted_obj.size = plaintext_size
output_tar.addfile(decrypted_obj, decrypted)


async def receive_file(
hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
) -> None:

@ -6,10 +6,15 @@ import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv

from .config import ScheduleState
from .config import Day, ScheduleRecurrence
from .const import DATA_MANAGER, LOGGER
from .manager import IncorrectPasswordError, ManagerStateEvent
from .manager import (
    DecryptOnDowloadNotSupported,
    IncorrectPasswordError,
    ManagerStateEvent,
)
from .models import Folder

@ -24,6 +29,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->

    websocket_api.async_register_command(hass, handle_details)
    websocket_api.async_register_command(hass, handle_info)
    websocket_api.async_register_command(hass, handle_can_decrypt_on_download)
    websocket_api.async_register_command(hass, handle_create)
    websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
    websocket_api.async_register_command(hass, handle_delete)

@ -54,6 +60,8 @@ async def handle_info(
            "backups": [backup.as_frontend_json() for backup in backups.values()],
            "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
            "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
            "next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
            "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
        },
    )

@ -147,6 +155,38 @@ async def handle_restore(
    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/can_decrypt_on_download",
        vol.Required("backup_id"): str,
        vol.Required("agent_id"): str,
        vol.Required("password"): str,
    }
)
@websocket_api.async_response
async def handle_can_decrypt_on_download(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Check if the supplied password is correct."""
    try:
        await hass.data[DATA_MANAGER].async_can_decrypt_on_download(
            msg["backup_id"],
            agent_id=msg["agent_id"],
            password=msg.get("password"),
        )
    except IncorrectPasswordError:
        connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
    except DecryptOnDowloadNotSupported:
        connection.send_error(
            msg["id"], "decrypt_not_supported", "Decrypt on download not supported"
        )
    else:
        connection.send_result(msg["id"])

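For reference, a hypothetical client message exercising the new command (the id and all values are made up); on success the result echoes the id, otherwise the error code is "password_incorrect" or "decrypt_not_supported":

msg = {
    "id": 42,  # arbitrary message id chosen by the client
    "type": "backup/can_decrypt_on_download",
    "backup_id": "abc123",
    "agent_id": "backup.local",
    "password": "hunter2",
}
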
@websocket_api.require_admin
@websocket_api.websocket_command(
    {

@ -281,10 +321,18 @@ async def handle_config_info(
) -> None:
    """Send the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    config = manager.config.data.to_dict()
    # Remove state from schedule, it's not needed in the frontend
    # mypy doesn't like deleting from TypedDict, ignore it
    del config["schedule"]["state"]  # type: ignore[misc]
    connection.send_result(
        msg["id"],
        {
            "config": manager.config.data.to_dict(),
            "config": config
            | {
                "next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
                "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
            }
        },
    )

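The "|" in the new code is the dict union operator (PEP 584, Python 3.9+): it builds a new dict with the right-hand side winning on duplicate keys, so `config` itself is not mutated. A standalone illustration with made-up values:

base = {"retention": {"copies": 3, "days": None}}
extra = {"next_automatic_backup": "2025-01-01T03:00:00+00:00"}
merged = base | extra  # new dict; base is unchanged
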
@ -314,7 +362,17 @@ async def handle_config_info(
            vol.Optional("days"): vol.Any(int, None),
        },
    ),
    vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)),
    vol.Optional("schedule"): vol.Schema(
        {
            vol.Optional("days"): vol.Any(
                vol.All([vol.Coerce(Day)], vol.Unique()),
            ),
            vol.Optional("recurrence"): vol.All(
                str, vol.Coerce(ScheduleRecurrence)
            ),
            vol.Optional("time"): vol.Any(cv.time, None),
        }
    ),
}
)
@websocket_api.async_response

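An illustrative payload shape for the reworked "schedule" field; the command type and the concrete Day and ScheduleRecurrence values here are assumptions, not taken from the diff:

msg = {
    "id": 43,  # arbitrary message id
    "type": "backup/config/update",   # assumed command name
    "schedule": {
        "days": ["mon", "fri"],       # assumed Day values
        "recurrence": "custom_days",  # assumed ScheduleRecurrence value
        "time": "03:00:00",           # parsed by cv.time
    },
}
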
@ -10,9 +10,9 @@ from aiobafi6 import Device, Service
from aiobafi6.discovery import PORT
import voluptuous as vol

from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_IP_ADDRESS
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo

from .const import DOMAIN, RUN_TIMEOUT
from .models import BAFDiscovery

@ -44,7 +44,7 @@ class BAFFlowHandler(ConfigFlow, domain=DOMAIN):
        self.discovery: BAFDiscovery | None = None

    async def async_step_zeroconf(
        self, discovery_info: zeroconf.ZeroconfServiceInfo
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle zeroconf discovery."""
        if discovery_info.ip_address.version == 6:

@ -20,7 +20,7 @@ class BalboaEntity(Entity):
        """Initialize the control."""
        mac = client.mac_address
        model = client.model
        self._attr_unique_id = f'{model}-{key}-{mac.replace(":","")[-6:]}'
        self._attr_unique_id = f"{model}-{key}-{mac.replace(':', '')[-6:]}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, mac)},
            name=model,
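Only the quoting style of the unique_id changes here, not the value. Before Python 3.12, an f-string expression could not reuse the f-string's own quote character, which is why both variants mix quote types; a quick standalone check with a made-up MAC:

mac = "aa:bb:cc:dd:ee:ff"
old = f'{mac.replace(":","")[-6:]}'   # single quotes outside, double inside
new = f"{mac.replace(':', '')[-6:]}"  # double quotes outside, single inside
assert old == new == "ddeeff"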