Mirror of https://github.com/home-assistant/core.git (synced 2025-07-21 12:17:07 +00:00)

Commit 16b6da9c91: Merge branch 'dev' into mill
@@ -6,6 +6,7 @@ core: &core
   - homeassistant/helpers/**
   - homeassistant/package_constraints.txt
   - homeassistant/util/**
   - mypy.ini
   - pyproject.toml
   - requirements.txt
   - setup.cfg
@@ -131,6 +132,7 @@ tests: &tests
   - tests/components/conftest.py
   - tests/components/diagnostics/**
   - tests/components/history/**
   - tests/components/light/common.py
   - tests/components/logbook/**
   - tests/components/recorder/**
   - tests/components/repairs/**

.github/workflows/builder.yml (vendored, 4 changed lines)

@@ -517,7 +517,7 @@ jobs:
           tags: ${{ env.HASSFEST_IMAGE_TAG }}

       - name: Run hassfest against core
-        run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
+        run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace

       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
+        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci.yaml (vendored, 48 changed lines)

@@ -240,7 +240,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.0
         with:
           path: venv
           key: >-
@@ -256,7 +256,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -286,7 +286,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -366,7 +366,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -482,7 +482,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.0
         with:
           path: venv
           key: >-
@@ -490,7 +490,7 @@ jobs:
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.0
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -578,7 +578,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -611,7 +611,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -649,7 +649,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -692,7 +692,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -739,7 +739,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -791,7 +791,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -799,7 +799,7 @@ jobs:
             ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.1.2
+        uses: actions/cache@v4.2.0
         with:
           path: .mypy_cache
           key: >-
@@ -865,7 +865,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -929,7 +929,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1050,7 +1050,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1179,7 +1179,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1273,7 +1273,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.0.7
+        uses: codecov/codecov-action@v5.1.1
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1325,7 +1325,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.1.2
+        uses: actions/cache/restore@v4.2.0
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1411,7 +1411,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.0.7
+        uses: codecov/codecov-action@v5.1.1
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.27.5
+        uses: github/codeql-action/init@v3.27.9
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.27.5
+        uses: github/codeql-action/analyze@v3.27.9
         with:
           category: "/language:python"

.github/workflows/wheels.yml (vendored, 27 changed lines)

@@ -197,33 +197,6 @@ jobs:

           split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

-      - name: Create requirements for cython<3
-        if: matrix.abi == 'cp312'
-        run: |
-          # Some dependencies still require 'cython<3'
-          # and don't yet use isolated build environments.
-          # Build these first.
-          # pydantic: https://github.com/pydantic/pydantic/issues/7689
-
-          touch requirements_old-cython.txt
-          cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
-
-      - name: Build wheels (old cython)
-        uses: home-assistant/wheels@2024.11.0
-        if: matrix.abi == 'cp312'
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_old-cython.txt"
-          pip: "'cython<3'"
-
       - name: Build wheels (part 1)
         uses: home-assistant/wheels@2024.11.0
         with:

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.1
+    rev: v0.8.3
     hooks:
       - id: ruff
         args:

@@ -137,6 +137,7 @@ homeassistant.components.co2signal.*
 homeassistant.components.command_line.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
+homeassistant.components.cookidoo.*
 homeassistant.components.counter.*
 homeassistant.components.cover.*
 homeassistant.components.cpuspeed.*
@@ -169,6 +170,7 @@ homeassistant.components.easyenergy.*
 homeassistant.components.ecovacs.*
 homeassistant.components.ecowitt.*
 homeassistant.components.efergy.*
+homeassistant.components.eheimdigital.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
 homeassistant.components.elevenlabs.*
@@ -269,6 +271,7 @@ homeassistant.components.ios.*
 homeassistant.components.iotty.*
 homeassistant.components.ipp.*
 homeassistant.components.iqvia.*
 homeassistant.components.iron_os.*
 homeassistant.components.islamic_prayer_times.*
 homeassistant.components.isy994.*
 homeassistant.components.jellyfin.*
@@ -365,6 +368,7 @@ homeassistant.components.persistent_notification.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*
 homeassistant.components.prometheus.*
@@ -401,6 +405,7 @@ homeassistant.components.romy.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
 homeassistant.components.rtsp_to_webrtc.*
 homeassistant.components.russound_rio.*
 homeassistant.components.ruuvi_gateway.*
 homeassistant.components.ruuvitag_ble.*
 homeassistant.components.samsungtv.*
@@ -439,7 +444,6 @@ homeassistant.components.ssdp.*
 homeassistant.components.starlink.*
 homeassistant.components.statistics.*
 homeassistant.components.steamist.*
-homeassistant.components.stookalert.*
 homeassistant.components.stookwijzer.*
 homeassistant.components.stream.*
 homeassistant.components.streamlabswater.*

.vscode/tasks.json (vendored, 14 changed lines)

@@ -16,7 +16,7 @@
     {
       "label": "Pytest",
       "type": "shell",
-      "command": "python3 -m pytest --timeout=10 tests",
+      "command": "${command:python.interpreterPath} -m pytest --timeout=10 tests",
       "dependsOn": ["Install all Test Requirements"],
       "group": {
         "kind": "test",
@@ -31,7 +31,7 @@
     {
      "label": "Pytest (changed tests only)",
       "type": "shell",
-      "command": "python3 -m pytest --timeout=10 --picked",
+      "command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked",
       "group": {
         "kind": "test",
         "isDefault": true
@@ -89,7 +89,7 @@
       "label": "Code Coverage",
       "detail": "Generate code coverage report for a given integration.",
       "type": "shell",
-      "command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+      "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
       "dependsOn": ["Compile English translations"],
       "group": {
         "kind": "test",
@@ -105,7 +105,7 @@
       "label": "Update syrupy snapshots",
       "detail": "Update syrupy snapshots for a given integration.",
       "type": "shell",
-      "command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update",
+      "command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update",
       "dependsOn": ["Compile English translations"],
       "group": {
         "kind": "test",
@@ -163,7 +163,7 @@
       "label": "Compile English translations",
       "detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.",
       "type": "shell",
-      "command": "python3 -m script.translations develop --all",
+      "command": "${command:python.interpreterPath} -m script.translations develop --all",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -173,7 +173,7 @@
       "label": "Run scaffold",
       "detail": "Add new functionality to a integration using a scaffold.",
       "type": "shell",
-      "command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
+      "command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -183,7 +183,7 @@
       "label": "Create new integration",
       "detail": "Use the scaffold to create a new integration.",
       "type": "shell",
-      "command": "python3 -m script.scaffold integration",
+      "command": "${command:python.interpreterPath} -m script.scaffold integration",
       "group": {
         "kind": "build",
         "isDefault": true

CODEOWNERS (24 changed lines)

@@ -284,6 +284,8 @@ build.json @home-assistant/supervisor
 /tests/components/control4/ @lawtancool
 /homeassistant/components/conversation/ @home-assistant/core @synesthesiam
 /tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/cookidoo/ @miaucl
+/tests/components/cookidoo/ @miaucl
 /homeassistant/components/coolmaster/ @OnFreund
 /tests/components/coolmaster/ @OnFreund
 /homeassistant/components/counter/ @fabaff
@@ -385,6 +387,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/efergy/ @tkdrob
 /tests/components/efergy/ @tkdrob
 /homeassistant/components/egardia/ @jeroenterheerdt
+/homeassistant/components/eheimdigital/ @autinerd
+/tests/components/eheimdigital/ @autinerd
 /homeassistant/components/electrasmart/ @jafar-atili
 /tests/components/electrasmart/ @jafar-atili
 /homeassistant/components/electric_kiwi/ @mikey0000
@@ -727,8 +731,8 @@ build.json @home-assistant/supervisor
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
 /tests/components/iotawatt/ @gtdiehl @jyavenard
-/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
-/tests/components/iotty/ @pburgio @shapournemati-iotty
+/homeassistant/components/iotty/ @shapournemati-iotty
+/tests/components/iotty/ @shapournemati-iotty
 /homeassistant/components/iperf3/ @rohankapoorcom
 /homeassistant/components/ipma/ @dgomes
 /tests/components/ipma/ @dgomes
@@ -753,6 +757,8 @@ build.json @home-assistant/supervisor
 /tests/components/ista_ecotrend/ @tr4nt0r
 /homeassistant/components/isy994/ @bdraco @shbatm
 /tests/components/isy994/ @bdraco @shbatm
+/homeassistant/components/ituran/ @shmuelzon
+/tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
 /homeassistant/components/jellyfin/ @j-stienstra @ctalkington
@@ -1047,6 +1053,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/octoprint/ @rfleming71
 /tests/components/octoprint/ @rfleming71
 /homeassistant/components/ohmconnect/ @robbiet480
+/homeassistant/components/ohme/ @dan-r
+/tests/components/ohme/ @dan-r
 /homeassistant/components/ollama/ @synesthesiam
 /tests/components/ollama/ @synesthesiam
 /homeassistant/components/ombi/ @larssont
@@ -1133,6 +1141,8 @@ build.json @home-assistant/supervisor
 /tests/components/point/ @fredrike
 /homeassistant/components/poolsense/ @haemishkyd
 /tests/components/poolsense/ @haemishkyd
+/homeassistant/components/powerfox/ @klaasnicolaas
+/tests/components/powerfox/ @klaasnicolaas
 /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
 /tests/components/powerwall/ @bdraco @jrester @daniel-simpson
 /homeassistant/components/private_ble_device/ @Jc2k
@@ -1355,6 +1365,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/sleepiq/ @mfugate1 @kbickar
 /tests/components/sleepiq/ @mfugate1 @kbickar
 /homeassistant/components/slide/ @ualex73
+/homeassistant/components/slide_local/ @dontinelli
+/tests/components/slide_local/ @dontinelli
 /homeassistant/components/slimproto/ @marcelveldt
 /tests/components/slimproto/ @marcelveldt
 /homeassistant/components/sma/ @kellerza @rklomp
@@ -1413,15 +1425,13 @@ build.json @home-assistant/supervisor
 /tests/components/starline/ @anonym-tsk
 /homeassistant/components/starlink/ @boswelja
 /tests/components/starlink/ @boswelja
-/homeassistant/components/statistics/ @ThomDietrich
-/tests/components/statistics/ @ThomDietrich
+/homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST
+/tests/components/statistics/ @ThomDietrich @gjohansson-ST
 /homeassistant/components/steam_online/ @tkdrob
 /tests/components/steam_online/ @tkdrob
 /homeassistant/components/steamist/ @bdraco
 /tests/components/steamist/ @bdraco
 /homeassistant/components/stiebel_eltron/ @fucm
-/homeassistant/components/stookalert/ @fwestenberg @frenck
-/tests/components/stookalert/ @fwestenberg @frenck
 /homeassistant/components/stookwijzer/ @fwestenberg
 /tests/components/stookwijzer/ @fwestenberg
 /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@@ -1644,6 +1654,8 @@ build.json @home-assistant/supervisor
 /tests/components/waqi/ @joostlek
 /homeassistant/components/water_heater/ @home-assistant/core
 /tests/components/water_heater/ @home-assistant/core
+/homeassistant/components/watergate/ @adam-the-hero
+/tests/components/watergate/ @adam-the-hero
 /homeassistant/components/watson_tts/ @rutkai
 /homeassistant/components/watttime/ @bachya
 /tests/components/watttime/ @bachya

@@ -13,7 +13,7 @@ ENV \
 ARG QEMU_CPU

 # Install uv
-RUN pip3 install uv==0.5.4
+RUN pip3 install uv==0.5.8

 WORKDIR /usr/src

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:1-3.12
+FROM mcr.microsoft.com/devcontainers/python:1-3.13

 SHELL ["/bin/bash", "-o", "pipefail", "-c"]

build.yaml (10 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io

@@ -115,7 +115,7 @@ class AuthManagerFlowManager(
         *,
         context: AuthFlowContext | None = None,
         data: dict[str, Any] | None = None,
-    ) -> LoginFlow:
+    ) -> LoginFlow[Any]:
         """Create a login flow."""
         auth_provider = self.auth_manager.get_auth_provider(*handler_key)
         if not auth_provider:

@@ -4,8 +4,9 @@ from __future__ import annotations

 import logging
 import types
-from typing import Any
+from typing import Any, Generic

+from typing_extensions import TypeVar
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

@@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")

 _LOGGER = logging.getLogger(__name__)

+_MultiFactorAuthModuleT = TypeVar(
+    "_MultiFactorAuthModuleT",
+    bound="MultiFactorAuthModule",
+    default="MultiFactorAuthModule",
+)
+

 class MultiFactorAuthModule:
     """Multi-factor Auth Module of validation function."""
@@ -71,7 +78,7 @@ class MultiFactorAuthModule:
         """Return a voluptuous schema to define mfa auth module's input."""
         raise NotImplementedError

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]:
         """Return a data entry flow handler for setup module.

         Mfa module should extend SetupFlow
@@ -95,11 +102,14 @@ class MultiFactorAuthModule:
         raise NotImplementedError


-class SetupFlow(data_entry_flow.FlowHandler):
+class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
     """Handler for the setup flow."""

     def __init__(
-        self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
+        self,
+        auth_module: _MultiFactorAuthModuleT,
+        setup_schema: vol.Schema,
+        user_id: str,
     ) -> None:
         """Initialize the setup flow."""
         self._auth_module = auth_module
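
For readers unfamiliar with it: typing_extensions.TypeVar (unlike typing.TypeVar on older Python versions) accepts a default=, which is what lets existing unparametrized SetupFlow annotations keep meaning SetupFlow[MultiFactorAuthModule]. A runnable toy version of the pattern follows; the names are illustrative, not the Home Assistant classes, and it assumes typing_extensions is installed:

from typing import Generic

from typing_extensions import TypeVar


class Module:
    """Stand-in for MultiFactorAuthModule."""


class NotifyModule(Module):
    def notify(self) -> str:
        return "sent"


# bound= constrains what may be stored; default= applies when someone writes
# bare `SetupFlow` with no type parameter.
_ModuleT = TypeVar("_ModuleT", bound=Module, default=Module)


class SetupFlow(Generic[_ModuleT]):
    def __init__(self, module: _ModuleT) -> None:
        self._module = module


class NotifySetupFlow(SetupFlow[NotifyModule]):
    def send(self) -> str:
        # The parametrized base already types _module as NotifyModule,
        # so no annotation override or cast is needed here.
        return self._module.notify()


print(NotifySetupFlow(NotifyModule()).send())  # -> "sent"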

@@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule):

         return sorted(unordered_services)

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> NotifySetupFlow:
         """Return a data entry flow handler for setup module.

         Mfa module should extend SetupFlow
@@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
         await self.hass.services.async_call("notify", notify_service, data)


-class NotifySetupFlow(SetupFlow):
+class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
     """Handler for the setup flow."""

     def __init__(
@@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow):
     ) -> None:
         """Initialize the setup flow."""
         super().__init__(auth_module, setup_schema, user_id)
-        # to fix typing complaint
-        self._auth_module: NotifyAuthModule = auth_module
         self._available_notify_services = available_notify_services
         self._secret: str | None = None
         self._count: int | None = None

@@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule):
         self._users[user_id] = ota_secret  # type: ignore[index]
         return ota_secret

-    async def async_setup_flow(self, user_id: str) -> SetupFlow:
+    async def async_setup_flow(self, user_id: str) -> TotpSetupFlow:
         """Return a data entry flow handler for setup module.

         Mfa module should extend SetupFlow
@@ -174,10 +174,9 @@ class TotpAuthModule(MultiFactorAuthModule):
         return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))


-class TotpSetupFlow(SetupFlow):
+class TotpSetupFlow(SetupFlow[TotpAuthModule]):
     """Handler for the setup flow."""

-    _auth_module: TotpAuthModule
     _ota_secret: str
     _url: str
     _image: str

@@ -5,8 +5,9 @@ from __future__ import annotations

 from collections.abc import Mapping
 import logging
 import types
-from typing import Any
+from typing import Any, Generic

+from typing_extensions import TypeVar
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

@@ -46,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
     extra=vol.ALLOW_EXTRA,
 )

+_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")
+

 class AuthProvider:
     """Provider of user authentication."""
@@ -105,7 +108,7 @@ class AuthProvider:

     # Implement by extending class

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]:
         """Return the data flow for logging in with auth provider.

         Auth provider should extend LoginFlow and return an instance.
@@ -192,12 +195,15 @@ async def load_auth_provider_module(
     return module


-class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
+class LoginFlow(
+    FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
+    Generic[_AuthProviderT],
+):
     """Handler for the login flow."""

     _flow_result = AuthFlowResult

-    def __init__(self, auth_provider: AuthProvider) -> None:
+    def __init__(self, auth_provider: _AuthProviderT) -> None:
         """Initialize the login flow."""
         self._auth_provider = auth_provider
         self._auth_module_id: str | None = None

@@ -6,7 +6,7 @@ import asyncio
 from collections.abc import Mapping
 import logging
 import os
-from typing import Any, cast
+from typing import Any

 import voluptuous as vol

@@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider):
         super().__init__(*args, **kwargs)
         self._user_meta: dict[str, dict[str, Any]] = {}

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> CommandLineLoginFlow:
         """Return a flow to login."""
         return CommandLineLoginFlow(self)

@@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider):
         )


-class CommandLineLoginFlow(LoginFlow):
+class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
     """Handler for the login flow."""

     async def async_step_init(
@@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow):
         if user_input is not None:
             user_input["username"] = user_input["username"].strip()
             try:
-                await cast(
-                    CommandLineAuthProvider, self._auth_provider
-                ).async_validate_login(user_input["username"], user_input["password"])
+                await self._auth_provider.async_validate_login(
+                    user_input["username"], user_input["password"]
+                )
             except InvalidAuthError:
                 errors["base"] = "invalid_auth"

@@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
         await data.async_load()
         self.data = data

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow:
         """Return a flow to login."""
         return HassLoginFlow(self)

@@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider):
         pass


-class HassLoginFlow(LoginFlow):
+class HassLoginFlow(LoginFlow[HassAuthProvider]):
     """Handler for the login flow."""

     async def async_step_init(
@@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow):

         if user_input is not None:
             try:
-                await cast(HassAuthProvider, self._auth_provider).async_validate_login(
+                await self._auth_provider.async_validate_login(
                     user_input["username"], user_input["password"]
                 )
             except InvalidAuth:

@@ -4,7 +4,6 @@ from __future__ import annotations

 from collections.abc import Mapping
 import hmac
-from typing import cast

 import voluptuous as vol

@@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError):
 class ExampleAuthProvider(AuthProvider):
     """Example auth provider based on hardcoded usernames and passwords."""

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> ExampleLoginFlow:
         """Return a flow to login."""
         return ExampleLoginFlow(self)

@@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider):
         return UserMeta(name=name, is_active=True)


-class ExampleLoginFlow(LoginFlow):
+class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):
     """Handler for the login flow."""

     async def async_step_init(
@@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow):

         if user_input is not None:
             try:
-                cast(ExampleAuthProvider, self._auth_provider).async_validate_login(
+                self._auth_provider.async_validate_login(
                     user_input["username"], user_input["password"]
                 )
             except InvalidAuthError:

@@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider):
         """Trusted Networks auth provider does not support MFA."""
         return False

-    async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
+    async def async_login_flow(
+        self, context: AuthFlowContext | None
+    ) -> TrustedNetworksLoginFlow:
         """Return a flow to login."""
         assert context is not None
         ip_addr = cast(IPAddress, context.get("ip_address"))
@@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
         self.async_validate_access(ip_address(remote_ip))


-class TrustedNetworksLoginFlow(LoginFlow):
+class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
     """Handler for the login flow."""

     def __init__(
@@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
     ) -> AuthFlowResult:
         """Handle the step of the form."""
         try:
-            cast(
-                TrustedNetworksAuthProvider, self._auth_provider
-            ).async_validate_access(self._ip_address)
+            self._auth_provider.async_validate_access(self._ip_address)

         except InvalidAuthError:
             return self.async_abort(reason="not_allowed")
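
The cast() deletions in the provider hunks above all follow from the generic LoginFlow change: once a subclass declares, say, LoginFlow[TrustedNetworksAuthProvider], self._auth_provider is already narrowly typed. A minimal runnable sketch of the before/after, with illustrative names rather than the Home Assistant classes:

from typing import Generic, TypeVar

class Provider:
    """Stand-in for AuthProvider."""

class NetworkProvider(Provider):
    def validate_access(self) -> bool:
        return True

P = TypeVar("P", bound=Provider)

class Flow(Generic[P]):
    def __init__(self, provider: P) -> None:
        self._provider = provider  # typed as P, not plain Provider

class NetworkFlow(Flow[NetworkProvider]):
    def check(self) -> bool:
        # Before the change, an untyped base forced:
        #   cast(NetworkProvider, self._provider).validate_access()
        # With Flow[NetworkProvider], the attribute is already narrowed:
        return self._provider.validate_access()

print(NetworkFlow(NetworkProvider()).check())  # True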

@@ -1,6 +1,10 @@
 """Home Assistant module to handle restoring backups."""

+from __future__ import annotations
+
+from collections.abc import Iterable
 from dataclasses import dataclass
+import hashlib
 import json
 import logging
 from pathlib import Path
@@ -14,7 +18,12 @@ import securetar

 from .const import __version__ as HA_VERSION

 RESTORE_BACKUP_FILE = ".HA_RESTORE"
-KEEP_PATHS = ("backups",)
+KEEP_BACKUPS = ("backups",)
+KEEP_DATABASE = (
+    "home-assistant_v2.db",
+    "home-assistant_v2.db-wal",
+)

 _LOGGER = logging.getLogger(__name__)

@@ -24,6 +33,21 @@ class RestoreBackupFileContent:
     """Definition for restore backup file content."""

     backup_file_path: Path
+    password: str | None
+    remove_after_restore: bool
+    restore_database: bool
+    restore_homeassistant: bool
+
+
+def password_to_key(password: str) -> bytes:
+    """Generate a AES Key from password.
+
+    Matches the implementation in supervisor.backups.utils.password_to_key.
+    """
+    key: bytes = password.encode()
+    for _ in range(100):
+        key = hashlib.sha256(key).digest()
+    return key[:16]


 def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
@@ -32,20 +56,24 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent |
     try:
         instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
         return RestoreBackupFileContent(
-            backup_file_path=Path(instruction_content["path"])
+            backup_file_path=Path(instruction_content["path"]),
+            password=instruction_content["password"],
+            remove_after_restore=instruction_content["remove_after_restore"],
+            restore_database=instruction_content["restore_database"],
+            restore_homeassistant=instruction_content["restore_homeassistant"],
         )
-    except (FileNotFoundError, json.JSONDecodeError):
+    except (FileNotFoundError, KeyError, json.JSONDecodeError):
         return None


-def _clear_configuration_directory(config_dir: Path) -> None:
-    """Delete all files and directories in the config directory except for the backups directory."""
-    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
-    config_contents = sorted(
-        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
+def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
+    """Delete all files and directories in the config directory except entries in the keep list."""
+    keep_paths = [config_dir.joinpath(path) for path in keep]
+    entries_to_remove = sorted(
+        entry for entry in config_dir.iterdir() if entry not in keep_paths
     )

-    for entry in config_contents:
+    for entry in entries_to_remove:
         entrypath = config_dir.joinpath(entry)

         if entrypath.is_file():
@@ -54,12 +82,15 @@ def _clear_configuration_directory(config_dir: Path) -> None:
             shutil.rmtree(entrypath)


-def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
+def _extract_backup(
+    config_dir: Path,
+    restore_content: RestoreBackupFileContent,
+) -> None:
     """Extract the backup file to the config directory."""
     with (
         TemporaryDirectory() as tempdir,
         securetar.SecureTarFile(
-            backup_file_path,
+            restore_content.backup_file_path,
             gzip=False,
             mode="r",
         ) as ostf,
@@ -88,22 +119,41 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
                 f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
             ),
             gzip=backup_meta["compressed"],
+            key=password_to_key(restore_content.password)
+            if restore_content.password is not None
+            else None,
             mode="r",
         ) as istf:
-            for member in istf.getmembers():
-                if member.name == "data":
-                    continue
-                member.name = member.name.replace("data/", "")
-            _clear_configuration_directory(config_dir)
             istf.extractall(
-                path=config_dir,
-                members=[
-                    member
-                    for member in securetar.secure_path(istf)
-                    if member.name != "data"
-                ],
+                path=Path(tempdir, "homeassistant"),
+                members=securetar.secure_path(istf),
                 filter="fully_trusted",
             )
+            if restore_content.restore_homeassistant:
+                keep = list(KEEP_BACKUPS)
+                if not restore_content.restore_database:
+                    keep.extend(KEEP_DATABASE)
+                _clear_configuration_directory(config_dir, keep)
+                shutil.copytree(
+                    Path(tempdir, "homeassistant", "data"),
+                    config_dir,
+                    dirs_exist_ok=True,
+                    ignore=shutil.ignore_patterns(*(keep)),
+                )
+            elif restore_content.restore_database:
+                for entry in KEEP_DATABASE:
+                    entrypath = config_dir / entry
+
+                    if entrypath.is_file():
+                        entrypath.unlink()
+                    elif entrypath.is_dir():
+                        shutil.rmtree(entrypath)
+
+                for entry in KEEP_DATABASE:
+                    shutil.copy(
+                        Path(tempdir, "homeassistant", "data", entry),
+                        config_dir,
+                    )


 def restore_backup(config_dir_path: str) -> bool:
@@ -119,8 +169,13 @@ def restore_backup(config_dir_path: str) -> bool:
     backup_file_path = restore_content.backup_file_path
     _LOGGER.info("Restoring %s", backup_file_path)
     try:
-        _extract_backup(config_dir, backup_file_path)
+        _extract_backup(
+            config_dir=config_dir,
+            restore_content=restore_content,
+        )
     except FileNotFoundError as err:
         raise ValueError(f"Backup file {backup_file_path} does not exist") from err
+    if restore_content.remove_after_restore:
+        backup_file_path.unlink(missing_ok=True)
     _LOGGER.info("Restore complete, restarting")
     return True
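
The key-derivation helper added above is self-contained; pulled out verbatim, it runs standalone. It stretches the password through 100 rounds of SHA-256 and truncates the digest to 16 bytes, an AES-128 key, which the docstring says mirrors supervisor.backups.utils.password_to_key:

import hashlib


def password_to_key(password: str) -> bytes:
    """Generate a AES Key from password (copied from the hunk above)."""
    key: bytes = password.encode()
    for _ in range(100):
        key = hashlib.sha256(key).digest()
    return key[:16]


print(len(password_to_key("correct horse battery staple")))  # 16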

@@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
     return False


+def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
+    # If only cadata is passed, we can ignore it
+    kwargs = mapped_args.get("kwargs")
+    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)
+
+
 @dataclass(slots=True, frozen=True)
 class BlockingCall:
     """Class to hold information about a blocking call."""
@@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
         original_func=SSLContext.load_verify_locations,
         object=SSLContext,
         function="load_verify_locations",
-        check_allowed=None,
+        check_allowed=_check_load_verify_locations_call_allowed,
         strict=False,
         strict_core=False,
         skip_for_tests=True,
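
The hunk wires a check_allowed predicate into the blocking-call detector: SSLContext.load_verify_locations normally reads certificate files from disk, a blocking call that should not run in the event loop, but when only cadata is passed the certificates are already in memory. A sketch of how such a predicate short-circuits the report; the wrapper shape here is an assumption for illustration, not the actual block_async_io internals:

from collections.abc import Callable
from typing import Any


def maybe_report_blocking_call(
    mapped_args: dict[str, Any],
    check_allowed: Callable[[dict[str, Any]], bool] | None,
) -> None:
    if check_allowed is not None and check_allowed(mapped_args):
        return  # allowed: no disk I/O happens for this argument shape
    print("Detected blocking call inside the event loop")


def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
    # If only cadata is passed, we can ignore it (copied from the hunk above)
    kwargs = mapped_args.get("kwargs")
    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)


maybe_report_blocking_call(
    {"kwargs": {"cadata": "-----BEGIN CERTIFICATE-----..."}},
    _check_load_verify_locations_call_allowed,
)  # stays silent
maybe_report_blocking_call(
    {"kwargs": {"cafile": "/etc/ssl/certs/ca.pem"}},
    _check_load_verify_locations_call_allowed,
)  # reports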

homeassistant/brands/slide.json (new file, 5 lines)

@@ -0,0 +1,5 @@
+{
+  "domain": "slide",
+  "name": "Slide",
+  "integrations": ["slide", "slide_local"]
+}

@@ -9,18 +9,16 @@ from jaraco.abode.devices.light import Light

 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
-    ATTR_COLOR_TEMP,
+    ATTR_COLOR_TEMP_KELVIN,
     ATTR_HS_COLOR,
+    DEFAULT_MAX_KELVIN,
+    DEFAULT_MIN_KELVIN,
     ColorMode,
     LightEntity,
 )
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
-from homeassistant.util.color import (
-    color_temperature_kelvin_to_mired,
-    color_temperature_mired_to_kelvin,
-)

 from . import AbodeSystem
 from .const import DOMAIN
@@ -44,13 +42,13 @@ class AbodeLight(AbodeDevice, LightEntity):

     _device: Light
     _attr_name = None
+    _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
+    _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN

     def turn_on(self, **kwargs: Any) -> None:
         """Turn on the light."""
-        if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable:
-            self._device.set_color_temp(
-                int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]))
-            )
+        if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable:
+            self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])
             return

         if ATTR_HS_COLOR in kwargs and self._device.is_color_capable:
@@ -85,10 +83,10 @@ class AbodeLight(AbodeDevice, LightEntity):
         return None

     @property
-    def color_temp(self) -> int | None:
+    def color_temp_kelvin(self) -> int | None:
         """Return the color temp of the light."""
         if self._device.has_color:
-            return color_temperature_kelvin_to_mired(self._device.color_temp)
+            return int(self._device.color_temp)
         return None

     @property
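
Background for the abode hunks: mireds and kelvin are reciprocal scales (mired = 1,000,000 / kelvin), and the jaraco.abode device already reports kelvin, so the conversion helpers can simply be dropped. A sketch of the relationship, approximating rather than quoting homeassistant.util.color:

def kelvin_to_mired(kelvin: float) -> int:
    # Higher kelvin (cooler light) means fewer mireds.
    return round(1_000_000 / kelvin)


def mired_to_kelvin(mired: float) -> int:
    return round(1_000_000 / mired)


print(kelvin_to_mired(2700))  # 370 (warm white)
print(mired_to_kelvin(153))   # 6536 (cool daylight)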

@@ -16,6 +16,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from .coordinator import AcaiaConfigEntry
 from .entity import AcaiaEntity

+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+

 @dataclass(kw_only=True, frozen=True)
 class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):

@@ -25,5 +25,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
-  "requirements": ["aioacaia==0.1.10"]
+  "requirements": ["aioacaia==0.1.11"]
 }

@@ -21,6 +21,9 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from .coordinator import AcaiaConfigEntry
 from .entity import AcaiaEntity

+# Coordinator is used to centralize the data updates
+PARALLEL_UPDATES = 0
+

 @dataclass(kw_only=True, frozen=True)
 class AcaiaSensorEntityDescription(SensorEntityDescription):

@@ -75,7 +75,6 @@ class AdaxDevice(ClimateEntity):
     )
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
-    _enable_turn_on_off_backwards_compatibility = False

     def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
         """Initialize the heater."""

@@ -102,7 +102,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
     _attr_max_temp = 32
     _attr_min_temp = 16
     _attr_name = None
-    _enable_turn_on_off_backwards_compatibility = False
     _support_preset = ClimateEntityFeature(0)

     def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
@@ -261,7 +260,6 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
     _attr_target_temperature_step = PRECISION_WHOLE
     _attr_max_temp = 32
     _attr_min_temp = 16
-    _enable_turn_on_off_backwards_compatibility = False

     def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
         """Initialize an AdvantageAir Zone control."""

@@ -95,7 +95,6 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
         | ClimateEntityFeature.TURN_ON
     )
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
-    _enable_turn_on_off_backwards_compatibility = False

     def __init__(self, coordinator, ac_number, info):
         """Initialize the climate device."""
@@ -205,7 +204,6 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
     )
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
     _attr_hvac_modes = AT_GROUP_MODES
-    _enable_turn_on_off_backwards_compatibility = False

     def __init__(self, coordinator, group_number, info):
         """Initialize the climate device."""

@@ -124,7 +124,6 @@ class Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity):
     _attr_translation_key = DOMAIN
     _attr_target_temperature_step = 1
     _attr_name = None
-    _enable_turn_on_off_backwards_compatibility = False


 class Airtouch5AC(Airtouch5ClimateEntity):

@@ -136,7 +136,6 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
     _attr_name = None
     _speeds: dict[int, str] = {}
     _speeds_reverse: dict[str, int] = {}
-    _enable_turn_on_off_backwards_compatibility = False

     def __init__(
         self,

@@ -177,7 +177,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):

     _attr_name = None
     _attr_temperature_unit = UnitOfTemperature.CELSIUS
-    _enable_turn_on_off_backwards_compatibility = False

     def _init_attributes(self) -> None:
         """Init common climate device attributes."""
@@ -194,12 +193,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
             ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
         )

-        if (
-            self.get_airzone_value(AZD_SPEED) is not None
-            and self.get_airzone_value(AZD_SPEEDS) is not None
-        ):
-            self._initialize_fan_speeds()
-
     @callback
     def _handle_coordinator_update(self) -> None:
         """Update attributes when the coordinator updates."""
@@ -214,8 +207,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
         self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[
             self.get_airzone_value(AZD_ACTION)
         ]
-        if self.supported_features & ClimateEntityFeature.FAN_MODE:
-            self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
         if self.get_airzone_value(AZD_POWER):
             self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[
                 self.get_airzone_value(AZD_MODE)
@@ -252,6 +243,22 @@ class AirzoneDeviceClimate(AirzoneClimate):
     _speeds: dict[int, str]
     _speeds_reverse: dict[str, int]

+    def _init_attributes(self) -> None:
+        """Init common climate device attributes."""
+        super()._init_attributes()
+        if (
+            self.get_airzone_value(AZD_SPEED) is not None
+            and self.get_airzone_value(AZD_SPEEDS) is not None
+        ):
+            self._initialize_fan_speeds()
+
+    @callback
+    def _async_update_attrs(self) -> None:
+        """Update climate attributes."""
+        super()._async_update_attrs()
+        if self.supported_features & ClimateEntityFeature.FAN_MODE:
+            self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
+
     def _initialize_fan_speeds(self) -> None:
         """Initialize fan speeds."""
         azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS)

@@ -355,12 +355,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
     @cached_property
     def supported_features(self) -> AlarmControlPanelEntityFeature:
         """Return the list of supported features."""
-        features = self._attr_supported_features
-        if type(features) is int:  # noqa: E721
-            new_features = AlarmControlPanelEntityFeature(features)
-            self._report_deprecated_supported_features_values(new_features)
-            return new_features
-        return features
+        return self._attr_supported_features

     @final
     @property
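
What the deleted lines did: integrations that still assigned a bare int to _attr_supported_features had it coerced into the IntFlag enum, with a deprecation warning; that back-compat window is now closed. A toy version of the removed coercion, using an illustrative enum rather than the real feature flags:

from enum import IntFlag


class Feature(IntFlag):
    ARM_HOME = 1
    ARM_AWAY = 2


raw: object = 3  # a legacy integration assigning a plain int
# The shim converted ints to the enum (and warned); enum values passed through.
features = Feature(raw) if type(raw) is int else raw
print(features)  # Feature.ARM_HOME|ARM_AWAY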

@@ -317,6 +317,7 @@ class Alexa(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -403,6 +404,7 @@ class AlexaPowerController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -436,7 +438,7 @@ class AlexaPowerController(AlexaCapability):
         elif self.entity.domain == remote.DOMAIN:
             is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN)
         elif self.entity.domain == vacuum.DOMAIN:
-            is_on = self.entity.state == vacuum.STATE_CLEANING
+            is_on = self.entity.state == vacuum.VacuumActivity.CLEANING
         elif self.entity.domain == timer.DOMAIN:
             is_on = self.entity.state != STATE_IDLE
         elif self.entity.domain == water_heater.DOMAIN:
@@ -469,6 +471,7 @@ class AlexaLockController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -523,6 +526,7 @@ class AlexaSceneController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -562,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -611,6 +616,7 @@ class AlexaColorController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -669,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -715,6 +722,7 @@ class AlexaSpeaker(AlexaCapability):
             "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
             "it-IT",
             "ja-JP",
+            "nl-NL",
         }

     def name(self) -> str:
@@ -772,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability):
             "es-ES",
             "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
             "it-IT",
+            "nl-NL",
         }

     def name(self) -> str:
@@ -801,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -859,6 +869,7 @@ class AlexaInputController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -1104,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -1245,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability):
             "fr-CA",
             "fr-FR",
             "it-IT",
+            "nl-NL",
             "ja-JP",
         }

@@ -1723,6 +1736,7 @@ class AlexaRangeController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -2066,6 +2080,7 @@ class AlexaToggleController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -2212,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -2267,6 +2283,7 @@ class AlexaSeekController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -2360,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -2470,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability):
             "hi-IN",
             "it-IT",
             "ja-JP",
+            "nl-NL",
             "pt-BR",
         }

@@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = (
     "hi-IN",
     "it-IT",
     "ja-JP",
+    "nl-NL",
     "pt-BR",
 )

@@ -359,7 +359,7 @@ async def async_api_set_color_temperature(
     await hass.services.async_call(
         entity.domain,
         SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_KELVIN: kelvin},
+        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: kelvin},
         blocking=False,
         context=context,
     )
@@ -376,14 +376,14 @@ async def async_api_decrease_color_temp(
 ) -> AlexaResponse:
     """Process a decrease color temperature request."""
     entity = directive.entity
-    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
-    max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS])
+    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
+    min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN])

-    value = min(max_mireds, current + 50)
+    value = max(min_kelvin, current - 500)
     await hass.services.async_call(
         entity.domain,
         SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
+        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
         blocking=False,
         context=context,
     )
@@ -400,14 +400,14 @@ async def async_api_increase_color_temp(
 ) -> AlexaResponse:
     """Process an increase color temperature request."""
     entity = directive.entity
-    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
-    min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS])
+    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
+    max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN])

-    value = max(min_mireds, current - 50)
+    value = min(max_kelvin, current + 500)
     await hass.services.async_call(
         entity.domain,
         SERVICE_TURN_ON,
-        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
+        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
         blocking=False,
         context=context,
     )
@@ -527,6 +527,7 @@ async def async_api_unlock(
         "hi-IN",
         "it-IT",
         "ja-JP",
+        "nl-NL",
         "pt-BR",
     }:
         msg = (
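
Why the sign flips in the two hunks above: "decrease color temperature" means warmer light, which is a larger mired value but a smaller kelvin value, so the old mired-based handler added 50 mireds where the kelvin-based one subtracts 500 K. Worked numbers, with step sizes taken from the hunks and the 2700 K bound purely illustrative:

current_kelvin = 4000  # equivalent to 1_000_000 / 250 mireds
min_kelvin = 2700      # would come from ATTR_MIN_COLOR_TEMP_KELVIN

# New handler, DecreaseColorTemperature: clamp(current - 500)
print(max(min_kelvin, current_kelvin - 500))  # 3500 (warmer)

current_mireds = 250
max_mireds = 370       # equivalent to 1_000_000 / 2700 K

# Old handler, same directive: clamp(current + 50)
print(min(max_mireds, current_mireds + 50))   # 300 (also warmer)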

@@ -110,7 +110,7 @@ def _setup_androidtv(
         adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"

     else:
-        # Use "pure-python-adb" (communicate with ADB server)
+        # Communicate via ADB server
         signer = None
         adb_log = (
             "using ADB server at"
@@ -135,15 +135,16 @@ async def async_connect_androidtv(
     )

     aftv = await async_androidtv_setup(
-        config[CONF_HOST],
-        config[CONF_PORT],
-        adbkey,
-        config.get(CONF_ADB_SERVER_IP),
-        config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
-        state_detection_rules,
-        config[CONF_DEVICE_CLASS],
-        timeout,
-        signer,
+        host=config[CONF_HOST],
+        port=config[CONF_PORT],
+        adbkey=adbkey,
+        adb_server_ip=config.get(CONF_ADB_SERVER_IP),
+        adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
+        state_detection_rules=state_detection_rules,
+        device_class=config[CONF_DEVICE_CLASS],
+        auth_timeout_s=timeout,
+        signer=signer,
         log_errors=False,
     )

     if not aftv.available:
@ -151,5 +151,5 @@ class AndroidTVEntity(Entity):
|
||||
# Using "adb_shell" (Python ADB implementation)
|
||||
self.exceptions = ADB_PYTHON_EXCEPTIONS
|
||||
else:
|
||||
# Using "pure-python-adb" (communicate with ADB server)
|
||||
# Communicate via ADB server
|
||||
self.exceptions = ADB_TCP_EXCEPTIONS
|
||||
|
@ -6,10 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/androidtv",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["adb_shell", "androidtv", "pure_python_adb"],
|
||||
"requirements": [
|
||||
"adb-shell[async]==0.4.4",
|
||||
"androidtv[async]==0.0.75",
|
||||
"pure-python-adb[async]==0.3.0.dev0"
|
||||
]
|
||||
"loggers": ["adb_shell", "androidtv"],
|
||||
"requirements": ["adb-shell[async]==0.4.4", "androidtv[async]==0.0.75"]
|
||||
}
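
The androidtv hunk above switches the async_androidtv_setup call from positional to keyword arguments, which keeps the call site stable if the library reorders or adds parameters. A generic sketch of the same pattern; the connect function is hypothetical and only illustrates the idea:

import asyncio

# Hypothetical signature, for illustration only (not the androidtv API).
async def connect(*, host: str, port: int = 5555, adbkey: str | None = None) -> str:
    return f"{host}:{port}"

async def main() -> None:
    # Binding by name keeps the call correct even if the library
    # later reorders or inserts parameters.
    print(await connect(host="192.168.1.10", port=5555))

asyncio.run(main())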

@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/aosmith",
"iot_class": "cloud_polling",
"requirements": ["py-aosmith==1.0.11"]
"requirements": ["py-aosmith==1.0.12"]
}

@ -16,6 +16,7 @@ import time
from typing import Any, Literal, cast
import wave

import hass_nabucasa
import voluptuous as vol

from homeassistant.components import (
@ -29,6 +30,7 @@ from homeassistant.components import (
from homeassistant.components.tts import (
generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.const import MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
@ -917,6 +919,11 @@ class PipelineRun:
)
except (asyncio.CancelledError, TimeoutError):
raise # expected
except hass_nabucasa.auth.Unauthenticated as src_error:
raise SpeechToTextError(
code="cloud-auth-failed",
message="Home Assistant Cloud authentication failed",
) from src_error
except Exception as src_error:
_LOGGER.exception("Unexpected error during speech-to-text")
raise SpeechToTextError(
@ -1009,15 +1016,23 @@ class PipelineRun:
if self.intent_agent is None:
raise RuntimeError("Recognize intent was not prepared")

if self.pipeline.conversation_language == MATCH_ALL:
# LLMs support all languages ('*') so use pipeline language for
# intent fallback.
input_language = self.pipeline.language
else:
input_language = self.pipeline.conversation_language

self.process_event(
PipelineEvent(
PipelineEventType.INTENT_START,
{
"engine": self.intent_agent,
"language": self.pipeline.conversation_language,
"language": input_language,
"intent_input": intent_input,
"conversation_id": conversation_id,
"device_id": device_id,
"prefer_local_intents": self.pipeline.prefer_local_intents,
},
)
)
@ -1028,9 +1043,10 @@ class PipelineRun:
context=self.context,
conversation_id=conversation_id,
device_id=device_id,
language=self.pipeline.language,
language=input_language,
agent_id=self.intent_agent,
)
processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT

conversation_result: conversation.ConversationResult | None = None
if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
@ -1061,6 +1077,7 @@
response=intent_response,
conversation_id=user_input.conversation_id,
)
processed_locally = True

if conversation_result is None:
# Fall back to pipeline conversation agent
@ -1085,7 +1102,10 @@
self.process_event(
PipelineEvent(
PipelineEventType.INTENT_END,
{"intent_output": conversation_result.as_dict()},
{
"processed_locally": processed_locally,
"intent_output": conversation_result.as_dict(),
},
)
)

@ -140,7 +140,7 @@ class VoiceCommandSegmenter:

self._timeout_seconds_left -= chunk_seconds
if self._timeout_seconds_left <= 0:
_LOGGER.warning(
_LOGGER.debug(
"VAD end of speech detection timed out after %s seconds",
self.timeout_seconds,
)
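
Downgrading the timeout log from warning to debug reflects that hitting the VAD timeout is routine. The seconds-left bookkeeping runs on audio time, not wall-clock time: each chunk's duration is derived from its sample count. A standalone sketch of that accounting; the constants are illustrative, not taken from the diff:

SAMPLE_RATE = 16_000  # Hz, a common rate for speech pipelines
SAMPLE_WIDTH = 2      # bytes per sample (16-bit PCM)

def chunk_seconds(chunk: bytes, channels: int = 1) -> float:
    """Duration of a raw PCM chunk in seconds of audio time."""
    return len(chunk) / (SAMPLE_RATE * SAMPLE_WIDTH * channels)

timeout_left = 5.0
chunk = bytes(32_000)  # one second of 16 kHz mono 16-bit audio
timeout_left -= chunk_seconds(chunk)
assert timeout_left == 4.0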
@ -46,7 +46,6 @@ class AtagThermostat(AtagEntity, ClimateEntity):
_attr_supported_features = (
ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
)
_enable_turn_on_off_backwards_compatibility = False

def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None:
"""Initialize an Atag climate device."""

@ -28,5 +28,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"]
"requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"]
}

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
"iot_class": "cloud_polling",
"loggers": ["aussiebb"],
"requirements": ["pyaussiebb==0.0.15"]
"requirements": ["pyaussiebb==0.1.4"]
}

@ -4,11 +4,12 @@ from __future__ import annotations

import asyncio

from autarco import Autarco
from autarco import Autarco, AutarcoConnectionError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AutarcoDataUpdateCoordinator
@ -25,7 +26,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> b
password=entry.data[CONF_PASSWORD],
session=async_get_clientsession(hass),
)
account_sites = await client.get_account()

try:
account_sites = await client.get_account()
except AutarcoConnectionError as err:
await client.close()
raise ConfigEntryNotReady from err

coordinators: list[AutarcoDataUpdateCoordinator] = [
AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites
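
Wrapping the first API call in try/except and raising ConfigEntryNotReady, as the autarco hunk above does, is the standard Home Assistant pattern for letting config-entry setup be retried automatically when the service is temporarily unreachable. A minimal sketch of the same shape; MyClient is a hypothetical stand-in for an integration's API client:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

class MyClient:  # hypothetical client, for illustration only
    def __init__(self, host: str) -> None:
        self.host = host
    async def connect(self) -> None:
        raise ConnectionError("offline")

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    client = MyClient(entry.data["host"])
    try:
        await client.connect()
    except ConnectionError as err:
        # ConfigEntryNotReady makes Home Assistant back off and retry setup.
        raise ConfigEntryNotReady(f"Cannot reach {client.host}") from err
    return True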
@ -28,7 +28,7 @@
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
}
},

@ -29,7 +29,7 @@
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["axis"],
"requirements": ["axis==63"],
"requirements": ["axis==64"],
"ssdp": [
{
"manufacturer": "AXIS"

@ -5,36 +5,81 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

from .const import DATA_MANAGER, DOMAIN, LOGGER
from .agent import (
BackupAgent,
BackupAgentError,
BackupAgentPlatformProtocol,
LocalBackupAgent,
)
from .const import DATA_MANAGER, DOMAIN
from .http import async_register_http_views
from .manager import BackupManager
from .manager import (
BackupManager,
BackupPlatformProtocol,
BackupReaderWriter,
CoreBackupReaderWriter,
CreateBackupEvent,
ManagerBackup,
NewBackup,
WrittenBackup,
)
from .models import AddonInfo, AgentBackup, Folder
from .websocket import async_register_websocket_handlers

__all__ = [
"AddonInfo",
"AgentBackup",
"ManagerBackup",
"BackupAgent",
"BackupAgentError",
"BackupAgentPlatformProtocol",
"BackupPlatformProtocol",
"BackupReaderWriter",
"CreateBackupEvent",
"Folder",
"LocalBackupAgent",
"NewBackup",
"WrittenBackup",
]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Backup integration."""
backup_manager = BackupManager(hass)
hass.data[DATA_MANAGER] = backup_manager

with_hassio = is_hassio(hass)

reader_writer: BackupReaderWriter
if not with_hassio:
reader_writer = CoreBackupReaderWriter(hass)
else:
# pylint: disable-next=import-outside-toplevel, hass-component-root-import
from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter

reader_writer = SupervisorBackupReaderWriter(hass)

backup_manager = BackupManager(hass, reader_writer)
hass.data[DATA_MANAGER] = backup_manager
await backup_manager.async_setup()

async_register_websocket_handlers(hass, with_hassio)

if with_hassio:
if DOMAIN in config:
LOGGER.error(
"The backup integration is not supported on this installation method, "
"please remove it from your configuration"
)
return True

async def async_handle_create_service(call: ServiceCall) -> None:
"""Service handler for creating backups."""
await backup_manager.async_create_backup()
agent_id = list(backup_manager.local_backup_agents)[0]
await backup_manager.async_create_backup(
agent_ids=[agent_id],
include_addons=None,
include_all_addons=False,
include_database=True,
include_folders=None,
include_homeassistant=True,
name=None,
password=None,
)

hass.services.async_register(DOMAIN, "create", async_handle_create_service)
if not with_hassio:
hass.services.async_register(DOMAIN, "create", async_handle_create_service)

async_register_http_views(hass)
121
homeassistant/components/backup/agent.py
Normal file
@ -0,0 +1,121 @@
"""Backup agents for the Backup integration."""

from __future__ import annotations

import abc
from collections.abc import AsyncIterator, Callable, Coroutine
from pathlib import Path
from typing import Any, Protocol

from propcache import cached_property

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

from .models import AgentBackup


class BackupAgentError(HomeAssistantError):
"""Base class for backup agent errors."""


class BackupAgentUnreachableError(BackupAgentError):
"""Raised when the agent can't reach its API."""

_message = "The backup agent is unreachable."


class BackupAgent(abc.ABC):
"""Backup agent interface."""

domain: str
name: str

@cached_property
def agent_id(self) -> str:
"""Return the agent_id."""
return f"{self.domain}.{self.name}"

@abc.abstractmethod
async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file.

:param backup_id: The ID of the backup that was returned in async_list_backups.
:return: An async iterator that yields bytes.
"""

@abc.abstractmethod
async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup.

:param open_stream: A function returning an async iterator that yields bytes.
:param backup: Metadata about the backup that should be uploaded.
"""

@abc.abstractmethod
async def async_delete_backup(
self,
backup_id: str,
**kwargs: Any,
) -> None:
"""Delete a backup file.

:param backup_id: The ID of the backup that was returned in async_list_backups.
"""

@abc.abstractmethod
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""

@abc.abstractmethod
async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""


class LocalBackupAgent(BackupAgent):
"""Local backup agent."""

@abc.abstractmethod
def get_backup_path(self, backup_id: str) -> Path:
"""Return the local path to a backup.

The method should return the path to the backup file with the specified id.
"""


class BackupAgentPlatformProtocol(Protocol):
"""Define the format of backup platforms which implement backup agents."""

async def async_get_backup_agents(
self,
hass: HomeAssistant,
**kwargs: Any,
) -> list[BackupAgent]:
"""Return a list of backup agents."""

@callback
def async_register_backup_agents_listener(
self,
hass: HomeAssistant,
*,
listener: Callable[[], None],
**kwargs: Any,
) -> Callable[[], None]:
"""Register a listener to be called when agents are added or removed.

:return: A function to unregister the listener.
"""
125
homeassistant/components/backup/backup.py
Normal file
@ -0,0 +1,125 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
import json
from pathlib import Path
from tarfile import TarError
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.hassio import is_hassio

from .agent import BackupAgent, LocalBackupAgent
from .const import DOMAIN, LOGGER
from .models import AgentBackup
from .util import read_backup


async def async_get_backup_agents(
hass: HomeAssistant,
**kwargs: Any,
) -> list[BackupAgent]:
"""Return the local backup agent."""
if is_hassio(hass):
return []
return [CoreLocalBackupAgent(hass)]


class CoreLocalBackupAgent(LocalBackupAgent):
"""Local backup agent for Core and Container installations."""

domain = DOMAIN
name = "local"

def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the backup agent."""
super().__init__()
self._hass = hass
self._backup_dir = Path(hass.config.path("backups"))
self._backups: dict[str, AgentBackup] = {}
self._loaded_backups = False

async def _load_backups(self) -> None:
"""Load data of stored backup files."""
backups = await self._hass.async_add_executor_job(self._read_backups)
LOGGER.debug("Loaded %s local backups", len(backups))
self._backups = backups
self._loaded_backups = True

def _read_backups(self) -> dict[str, AgentBackup]:
"""Read backups from disk."""
backups: dict[str, AgentBackup] = {}
for backup_path in self._backup_dir.glob("*.tar"):
try:
backup = read_backup(backup_path)
backups[backup.backup_id] = backup
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
return backups

async def async_download_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AsyncIterator[bytes]:
"""Download a backup file."""
raise NotImplementedError

async def async_upload_backup(
self,
*,
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
backup: AgentBackup,
**kwargs: Any,
) -> None:
"""Upload a backup."""
self._backups[backup.backup_id] = backup

async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
if not self._loaded_backups:
await self._load_backups()
return list(self._backups.values())

async def async_get_backup(
self,
backup_id: str,
**kwargs: Any,
) -> AgentBackup | None:
"""Return a backup."""
if not self._loaded_backups:
await self._load_backups()

if not (backup := self._backups.get(backup_id)):
return None

backup_path = self.get_backup_path(backup_id)
if not await self._hass.async_add_executor_job(backup_path.exists):
LOGGER.debug(
(
"Removing tracked backup (%s) that does not exists on the expected"
" path %s"
),
backup.backup_id,
backup_path,
)
self._backups.pop(backup_id)
return None

return backup

def get_backup_path(self, backup_id: str) -> Path:
"""Return the local path to a backup."""
return self._backup_dir / f"{backup_id}.tar"

async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
"""Delete a backup file."""
if await self.async_get_backup(backup_id) is None:
return

backup_path = self.get_backup_path(backup_id)
await self._hass.async_add_executor_job(backup_path.unlink, True)
LOGGER.debug("Deleted backup located at %s", backup_path)
self._backups.pop(backup_id)
471
homeassistant/components/backup/config.py
Normal file
@ -0,0 +1,471 @@
"""Provide persistent configuration for the backup integration."""

from __future__ import annotations

import asyncio
from collections.abc import Callable
from dataclasses import dataclass, field, replace
from datetime import datetime, timedelta
from enum import StrEnum
from typing import TYPE_CHECKING, Self, TypedDict

from cronsim import CronSim

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

from .const import LOGGER
from .models import Folder

if TYPE_CHECKING:
from .manager import BackupManager, ManagerBackup

# The time of the automatic backup event should be compatible with
# the time of the recorder's nightly job which runs at 04:12.
# Run the backup at 04:45.
CRON_PATTERN_DAILY = "45 4 * * *"
CRON_PATTERN_WEEKLY = "45 4 * * {}"
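
CronSim (added to the backup manifest's requirements later in this diff) is an iterator over datetimes matching a cron expression; seeding it with the last completed backup lets the schedule resume correctly across restarts. A quick illustration of the API:

from datetime import datetime
from cronsim import CronSim

# Iterate the daily 04:45 pattern from a fixed seed time.
it = CronSim("45 4 * * *", datetime(2024, 12, 1, 12, 0))
assert next(it) == datetime(2024, 12, 2, 4, 45)
assert next(it) == datetime(2024, 12, 3, 4, 45)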


class StoredBackupConfig(TypedDict):
"""Represent the stored backup config."""

create_backup: StoredCreateBackupConfig
last_attempted_strategy_backup: str | None
last_completed_strategy_backup: str | None
retention: StoredRetentionConfig
schedule: StoredBackupSchedule


@dataclass(kw_only=True)
class BackupConfigData:
"""Represent loaded backup config data."""

create_backup: CreateBackupConfig
last_attempted_strategy_backup: datetime | None = None
last_completed_strategy_backup: datetime | None = None
retention: RetentionConfig
schedule: BackupSchedule

@classmethod
def from_dict(cls, data: StoredBackupConfig) -> Self:
"""Initialize backup config data from a dict."""
include_folders_data = data["create_backup"]["include_folders"]
if include_folders_data:
include_folders = [Folder(folder) for folder in include_folders_data]
else:
include_folders = None
retention = data["retention"]

if last_attempted_str := data["last_attempted_strategy_backup"]:
last_attempted = dt_util.parse_datetime(last_attempted_str)
else:
last_attempted = None

if last_attempted_str := data["last_completed_strategy_backup"]:
last_completed = dt_util.parse_datetime(last_attempted_str)
else:
last_completed = None

return cls(
create_backup=CreateBackupConfig(
agent_ids=data["create_backup"]["agent_ids"],
include_addons=data["create_backup"]["include_addons"],
include_all_addons=data["create_backup"]["include_all_addons"],
include_database=data["create_backup"]["include_database"],
include_folders=include_folders,
name=data["create_backup"]["name"],
password=data["create_backup"]["password"],
),
last_attempted_strategy_backup=last_attempted,
last_completed_strategy_backup=last_completed,
retention=RetentionConfig(
copies=retention["copies"],
days=retention["days"],
),
schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])),
)

def to_dict(self) -> StoredBackupConfig:
"""Convert backup config data to a dict."""
if self.last_attempted_strategy_backup:
last_attempted = self.last_attempted_strategy_backup.isoformat()
else:
last_attempted = None

if self.last_completed_strategy_backup:
last_completed = self.last_completed_strategy_backup.isoformat()
else:
last_completed = None

return StoredBackupConfig(
create_backup=self.create_backup.to_dict(),
last_attempted_strategy_backup=last_attempted,
last_completed_strategy_backup=last_completed,
retention=self.retention.to_dict(),
schedule=self.schedule.to_dict(),
)


class BackupConfig:
"""Handle backup config."""

def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
"""Initialize backup config."""
self.data = BackupConfigData(
create_backup=CreateBackupConfig(),
retention=RetentionConfig(),
schedule=BackupSchedule(),
)
self._manager = manager

def load(self, stored_config: StoredBackupConfig) -> None:
"""Load config."""
self.data = BackupConfigData.from_dict(stored_config)
self.data.schedule.apply(self._manager)

async def update(
self,
*,
create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
retention: RetentionParametersDict | UndefinedType = UNDEFINED,
schedule: ScheduleState | UndefinedType = UNDEFINED,
) -> None:
"""Update config."""
if create_backup is not UNDEFINED:
self.data.create_backup = replace(self.data.create_backup, **create_backup)
if retention is not UNDEFINED:
new_retention = RetentionConfig(**retention)
if new_retention != self.data.retention:
self.data.retention = new_retention
self.data.retention.apply(self._manager)
if schedule is not UNDEFINED:
new_schedule = BackupSchedule(state=schedule)
if new_schedule.to_dict() != self.data.schedule.to_dict():
self.data.schedule = new_schedule
self.data.schedule.apply(self._manager)

self._manager.store.save()


@dataclass(kw_only=True)
class RetentionConfig:
"""Represent the backup retention configuration."""

copies: int | None = None
days: int | None = None

def apply(self, manager: BackupManager) -> None:
"""Apply backup retention configuration."""
if self.days is not None:
self._schedule_next(manager)
else:
self._unschedule_next(manager)

def to_dict(self) -> StoredRetentionConfig:
"""Convert backup retention configuration to a dict."""
return StoredRetentionConfig(
copies=self.copies,
days=self.days,
)

@callback
def _schedule_next(
self,
manager: BackupManager,
) -> None:
"""Schedule the next delete after days."""
self._unschedule_next(manager)

async def _delete_backups(now: datetime) -> None:
"""Delete backups older than days."""
self._schedule_next(manager)

def _backups_filter(
backups: dict[str, ManagerBackup],
) -> dict[str, ManagerBackup]:
"""Return backups older than days to delete."""
# we need to check here since we await before
# this filter is applied
if self.days is None:
return {}
now = dt_util.utcnow()
return {
backup_id: backup
for backup_id, backup in backups.items()
if dt_util.parse_datetime(backup.date, raise_on_error=True)
+ timedelta(days=self.days)
< now
}

await _delete_filtered_backups(manager, _backups_filter)

manager.remove_next_delete_event = async_call_later(
manager.hass, timedelta(days=1), _delete_backups
)

@callback
def _unschedule_next(self, manager: BackupManager) -> None:
"""Unschedule the next delete after days."""
if (remove_next_event := manager.remove_next_delete_event) is not None:
remove_next_event()
manager.remove_next_delete_event = None


class StoredRetentionConfig(TypedDict):
"""Represent the stored backup retention configuration."""

copies: int | None
days: int | None


class RetentionParametersDict(TypedDict, total=False):
"""Represent the parameters for retention."""

copies: int | None
days: int | None


class StoredBackupSchedule(TypedDict):
"""Represent the stored backup schedule configuration."""

state: ScheduleState


class ScheduleState(StrEnum):
"""Represent the schedule state."""

NEVER = "never"
DAILY = "daily"
MONDAY = "mon"
TUESDAY = "tue"
WEDNESDAY = "wed"
THURSDAY = "thu"
FRIDAY = "fri"
SATURDAY = "sat"
SUNDAY = "sun"


@dataclass(kw_only=True)
class BackupSchedule:
"""Represent the backup schedule."""

state: ScheduleState = ScheduleState.NEVER
cron_event: CronSim | None = field(init=False, default=None)

@callback
def apply(
self,
manager: BackupManager,
) -> None:
"""Apply a new schedule.

There are only three possible state types: never, daily, or weekly.
"""
if self.state is ScheduleState.NEVER:
self._unschedule_next(manager)
return

if self.state is ScheduleState.DAILY:
self._schedule_next(CRON_PATTERN_DAILY, manager)
else:
self._schedule_next(
CRON_PATTERN_WEEKLY.format(self.state.value),
manager,
)

@callback
def _schedule_next(
self,
cron_pattern: str,
manager: BackupManager,
) -> None:
"""Schedule the next backup."""
self._unschedule_next(manager)
now = dt_util.now()
if (cron_event := self.cron_event) is None:
seed_time = manager.config.data.last_completed_strategy_backup or now
cron_event = self.cron_event = CronSim(cron_pattern, seed_time)
next_time = next(cron_event)

if next_time < now:
# schedule a backup at next daily time once
# if we missed the last scheduled backup
cron_event = CronSim(CRON_PATTERN_DAILY, now)
next_time = next(cron_event)
# reseed the cron event attribute
# add a day to the next time to avoid scheduling at the same time again
self.cron_event = CronSim(cron_pattern, now + timedelta(days=1))

async def _create_backup(now: datetime) -> None:
"""Create backup."""
manager.remove_next_backup_event = None
config_data = manager.config.data
self._schedule_next(cron_pattern, manager)

# create the backup
try:
await manager.async_create_backup(
agent_ids=config_data.create_backup.agent_ids,
include_addons=config_data.create_backup.include_addons,
include_all_addons=config_data.create_backup.include_all_addons,
include_database=config_data.create_backup.include_database,
include_folders=config_data.create_backup.include_folders,
include_homeassistant=True,  # always include HA
name=config_data.create_backup.name,
password=config_data.create_backup.password,
with_strategy_settings=True,
)
except Exception:  # noqa: BLE001
# another more specific exception will be added
# and handled in the future
LOGGER.exception("Unexpected error creating automatic backup")

# delete old backups more numerous than copies

def _backups_filter(
backups: dict[str, ManagerBackup],
) -> dict[str, ManagerBackup]:
"""Return oldest backups more numerous than copies to delete."""
# we need to check here since we await before
# this filter is applied
if config_data.retention.copies is None:
return {}
return dict(
sorted(
backups.items(),
key=lambda backup_item: backup_item[1].date,
)[: len(backups) - config_data.retention.copies]
)

await _delete_filtered_backups(manager, _backups_filter)

manager.remove_next_backup_event = async_track_point_in_time(
manager.hass, _create_backup, next_time
)

def to_dict(self) -> StoredBackupSchedule:
"""Convert backup schedule to a dict."""
return StoredBackupSchedule(state=self.state)

@callback
def _unschedule_next(self, manager: BackupManager) -> None:
"""Unschedule the next backup."""
if (remove_next_event := manager.remove_next_backup_event) is not None:
remove_next_event()
manager.remove_next_backup_event = None


@dataclass(kw_only=True)
class CreateBackupConfig:
"""Represent the config for async_create_backup."""

agent_ids: list[str] = field(default_factory=list)
include_addons: list[str] | None = None
include_all_addons: bool = False
include_database: bool = True
include_folders: list[Folder] | None = None
name: str | None = None
password: str | None = None

def to_dict(self) -> StoredCreateBackupConfig:
"""Convert create backup config to a dict."""
return {
"agent_ids": self.agent_ids,
"include_addons": self.include_addons,
"include_all_addons": self.include_all_addons,
"include_database": self.include_database,
"include_folders": self.include_folders,
"name": self.name,
"password": self.password,
}


class StoredCreateBackupConfig(TypedDict):
"""Represent the stored config for async_create_backup."""

agent_ids: list[str]
include_addons: list[str] | None
include_all_addons: bool
include_database: bool
include_folders: list[Folder] | None
name: str | None
password: str | None


class CreateBackupParametersDict(TypedDict, total=False):
"""Represent the parameters for async_create_backup."""

agent_ids: list[str]
include_addons: list[str] | None
include_all_addons: bool
include_database: bool
include_folders: list[Folder] | None
name: str | None
password: str | None


async def _delete_filtered_backups(
manager: BackupManager,
backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
) -> None:
"""Delete backups parsed with a filter.

:param manager: The backup manager.
:param backup_filter: A filter that should return the backups to delete.
"""
backups, get_agent_errors = await manager.async_get_backups()
if get_agent_errors:
LOGGER.debug(
"Error getting backups; continuing anyway: %s",
get_agent_errors,
)

# only delete backups that are created by the backup strategy
backups = {
backup_id: backup
for backup_id, backup in backups.items()
if backup.with_strategy_settings
}

LOGGER.debug("Total strategy backups: %s", backups)

filtered_backups = backup_filter(backups)

if not filtered_backups:
return

# always delete oldest backup first
filtered_backups = dict(
sorted(
filtered_backups.items(),
key=lambda backup_item: backup_item[1].date,
)
)

if len(filtered_backups) >= len(backups):
# Never delete the last backup.
last_backup = filtered_backups.popitem()
LOGGER.debug("Keeping the last backup: %s", last_backup)

LOGGER.debug("Backups to delete: %s", filtered_backups)

if not filtered_backups:
return

backup_ids = list(filtered_backups)
delete_results = await asyncio.gather(
*(manager.async_delete_backup(backup_id) for backup_id in filtered_backups)
)
agent_errors = {
backup_id: error
for backup_id, error in zip(backup_ids, delete_results, strict=True)
if error
}
if agent_errors:
LOGGER.error(
"Error deleting old copies: %s",
agent_errors,
)
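
The copies-based retention filter in the schedule code above keeps the newest N strategy backups by sorting on the ISO-formatted date string and slicing off everything before the last copies entries. The same slice in isolation, with illustrative data:

backups = {
    "a": "2024-11-01T04:45:00",
    "b": "2024-11-02T04:45:00",
    "c": "2024-11-03T04:45:00",
}
copies = 2
# ISO-8601 strings sort chronologically, so a plain string sort works.
to_delete = dict(sorted(backups.items(), key=lambda kv: kv[1])[: len(backups) - copies])
assert to_delete == {"a": "2024-11-01T04:45:00"}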
@ -10,6 +10,7 @@ from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from .manager import BackupManager

BUF_SIZE = 2**20 * 4  # 4MB
DOMAIN = "backup"
DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)
@ -22,6 +23,12 @@ EXCLUDE_FROM_BACKUP = [
"*.log.*",
"*.log",
"backups/*.tar",
"tmp_backups/*.tar",
"OZW_Log.txt",
"tts/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
"home-assistant_v2.db",
"home-assistant_v2.db-wal",
]

@ -8,10 +8,11 @@ from typing import cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response
from aiohttp.web import FileResponse, Request, Response, StreamResponse

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import slugify

from .const import DATA_MANAGER
@ -27,30 +28,47 @@ def async_register_http_views(hass: HomeAssistant) -> None:
class DownloadBackupView(HomeAssistantView):
"""Generate backup view."""

url = "/api/backup/download/{slug}"
url = "/api/backup/download/{backup_id}"
name = "api:backup:download"

async def get(
self,
request: Request,
slug: str,
) -> FileResponse | Response:
backup_id: str,
) -> StreamResponse | FileResponse | Response:
"""Download a backup file."""
if not request["hass_user"].is_admin:
return Response(status=HTTPStatus.UNAUTHORIZED)
try:
agent_id = request.query.getone("agent_id")
except KeyError:
return Response(status=HTTPStatus.BAD_REQUEST)

manager = request.app[KEY_HASS].data[DATA_MANAGER]
backup = await manager.async_get_backup(slug=slug)
if agent_id not in manager.backup_agents:
return Response(status=HTTPStatus.BAD_REQUEST)
agent = manager.backup_agents[agent_id]
backup = await agent.async_get_backup(backup_id)

if backup is None or not backup.path.exists():
# We don't need to check if the path exists, aiohttp.FileResponse will handle
# that
if backup is None:
return Response(status=HTTPStatus.NOT_FOUND)

return FileResponse(
path=backup.path.as_posix(),
headers={
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
},
)
headers = {
CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
}
if agent_id in manager.local_backup_agents:
local_agent = manager.local_backup_agents[agent_id]
path = local_agent.get_backup_path(backup_id)
return FileResponse(path=path.as_posix(), headers=headers)

stream = await agent.async_download_backup(backup_id)
response = StreamResponse(status=HTTPStatus.OK, headers=headers)
await response.prepare(request)
async for chunk in stream:
await response.write(chunk)
return response
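
For non-local agents the view above streams the backup instead of serving a file: aiohttp's StreamResponse is prepared once, then chunks are written as the agent yields them. A self-contained sketch of that shape, outside Home Assistant:

from http import HTTPStatus
from aiohttp import web

async def handler(request: web.Request) -> web.StreamResponse:
    # An async generator stands in for agent.async_download_backup().
    async def chunks():
        yield b"part-1"
        yield b"part-2"

    response = web.StreamResponse(status=HTTPStatus.OK)
    await response.prepare(request)  # send headers before the body
    async for chunk in chunks():
        await response.write(chunk)
    await response.write_eof()
    return response

app = web.Application()
app.router.add_get("/download", handler)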


class UploadBackupView(HomeAssistantView):
@ -62,15 +80,24 @@ class UploadBackupView(HomeAssistantView):
@require_admin
async def post(self, request: Request) -> Response:
"""Upload a backup file."""
try:
agent_ids = request.query.getall("agent_id")
except KeyError:
return Response(status=HTTPStatus.BAD_REQUEST)
manager = request.app[KEY_HASS].data[DATA_MANAGER]
reader = await request.multipart()
contents = cast(BodyPartReader, await reader.next())

try:
await manager.async_receive_backup(contents=contents)
await manager.async_receive_backup(contents=contents, agent_ids=agent_ids)
except OSError as err:
return Response(
body=f"Can't write backup file {err}",
body=f"Can't write backup file: {err}",
status=HTTPStatus.INTERNAL_SERVER_ERROR,
)
except HomeAssistantError as err:
return Response(
body=f"Can't upload backup file: {err}",
status=HTTPStatus.INTERNAL_SERVER_ERROR,
)
except asyncio.CancelledError:

File diff suppressed because it is too large
@ -1,11 +1,12 @@
{
"domain": "backup",
"name": "Backup",
"after_dependencies": ["hassio"],
"codeowners": ["@home-assistant/core"],
"dependencies": ["http", "websocket_api"],
"documentation": "https://www.home-assistant.io/integrations/backup",
"integration_type": "system",
"iot_class": "calculated",
"quality_scale": "internal",
"requirements": ["securetar==2024.11.0"]
"requirements": ["cronsim==2.6", "securetar==2024.11.0"]
}

61
homeassistant/components/backup/models.py
Normal file
@ -0,0 +1,61 @@
"""Models for the backup integration."""

from __future__ import annotations

from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
"""Addon information."""

name: str
slug: str
version: str


class Folder(StrEnum):
"""Folder type."""

SHARE = "share"
ADDONS = "addons/local"
SSL = "ssl"
MEDIA = "media"


@dataclass(frozen=True, kw_only=True)
class AgentBackup:
"""Base backup class."""

addons: list[AddonInfo]
backup_id: str
date: str
database_included: bool
folders: list[Folder]
homeassistant_included: bool
homeassistant_version: str | None  # None if homeassistant_included is False
name: str
protected: bool
size: int

def as_dict(self) -> dict:
"""Return a dict representation of this backup."""
return asdict(self)

@classmethod
def from_dict(cls, data: dict[str, Any]) -> Self:
"""Create an instance from a JSON serialization."""
return cls(
addons=[AddonInfo(**addon) for addon in data["addons"]],
backup_id=data["backup_id"],
date=data["date"],
database_included=data["database_included"],
folders=[Folder(folder) for folder in data["folders"]],
homeassistant_included=data["homeassistant_included"],
homeassistant_version=data["homeassistant_version"],
name=data["name"],
protected=data["protected"],
size=data["size"],
)
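
AgentBackup is a frozen dataclass whose as_dict/from_dict pair gives a lossless JSON round-trip: asdict serializes nested AddonInfo dataclasses to dicts, and from_dict rebuilds AddonInfo and Folder values from their serialized forms. For instance, with illustrative values:

backup = AgentBackup(
    addons=[AddonInfo(name="Zigbee", slug="zigbee", version="1.0")],
    backup_id="abc123",
    date="2024-12-01T04:45:00+00:00",
    database_included=True,
    folders=[Folder.MEDIA],
    homeassistant_included=True,
    homeassistant_version="2024.12.0",
    name="Automatic backup",
    protected=False,
    size=1024,
)
assert AgentBackup.from_dict(backup.as_dict()) == backup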
52
homeassistant/components/backup/store.py
Normal file
@ -0,0 +1,52 @@
"""Store backup configuration."""

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store

from .const import DOMAIN

if TYPE_CHECKING:
from .config import StoredBackupConfig
from .manager import BackupManager, StoredKnownBackup

STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1


class StoredBackupData(TypedDict):
"""Represent the stored backup config."""

backups: list[StoredKnownBackup]
config: StoredBackupConfig


class BackupStore:
"""Store backup config."""

def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
"""Initialize the backup manager."""
self._hass = hass
self._manager = manager
self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY)

async def load(self) -> StoredBackupData | None:
"""Load the store."""
return await self._store.async_load()

@callback
def save(self) -> None:
"""Save config."""
self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE)

@callback
def _data_to_save(self) -> StoredBackupData:
"""Return data to save."""
return {
"backups": self._manager.known_backups.to_list(),
"config": self._manager.config.data.to_dict(),
}
111
homeassistant/components/backup/util.py
Normal file
@ -0,0 +1,111 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

import asyncio
from pathlib import Path
from queue import SimpleQueue
import tarfile
from typing import cast

import aiohttp

from homeassistant.core import HomeAssistant
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE
from .models import AddonInfo, AgentBackup, Folder


def make_backup_dir(path: Path) -> None:
"""Create a backup directory if it does not exist."""
path.mkdir(exist_ok=True)


def read_backup(backup_path: Path) -> AgentBackup:
"""Read a backup from disk."""

with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
if not (data_file := backup_file.extractfile("./backup.json")):
raise KeyError("backup.json not found in tar file")
data = json_loads_object(data_file.read())
addons = [
AddonInfo(
name=cast(str, addon["name"]),
slug=cast(str, addon["slug"]),
version=cast(str, addon["version"]),
)
for addon in cast(list[JsonObjectType], data.get("addons", []))
]

folders = [
Folder(folder)
for folder in cast(list[str], data.get("folders", []))
if folder != "homeassistant"
]

homeassistant_included = False
homeassistant_version: str | None = None
database_included = False
if (
homeassistant := cast(JsonObjectType, data.get("homeassistant"))
) and "version" in homeassistant:
homeassistant_included = True
homeassistant_version = cast(str, homeassistant["version"])
database_included = not cast(
bool, homeassistant.get("exclude_database", False)
)

return AgentBackup(
addons=addons,
backup_id=cast(str, data["slug"]),
database_included=database_included,
date=cast(str, data["date"]),
folders=folders,
homeassistant_included=homeassistant_included,
homeassistant_version=homeassistant_version,
name=cast(str, data["name"]),
protected=cast(bool, data.get("protected", False)),
size=backup_path.stat().st_size,
)


async def receive_file(
hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
) -> None:
"""Receive a file from a stream and write it to a file."""
queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue()

def _sync_queue_consumer() -> None:
with path.open("wb") as file_handle:
while True:
if (_chunk_future := queue.get()) is None:
break
_chunk, _future = _chunk_future
if _future is not None:
hass.loop.call_soon_threadsafe(_future.set_result, None)
file_handle.write(_chunk)

fut: asyncio.Future[None] | None = None
try:
fut = hass.async_add_executor_job(_sync_queue_consumer)
megabytes_sending = 0
while chunk := await contents.read_chunk(BUF_SIZE):
megabytes_sending += 1
if megabytes_sending % 5 != 0:
queue.put_nowait((chunk, None))
continue

chunk_future = hass.loop.create_future()
queue.put_nowait((chunk, chunk_future))
await asyncio.wait(
(fut, chunk_future),
return_when=asyncio.FIRST_COMPLETED,
)
if fut.done():
# The executor job failed
break

queue.put_nowait(None)  # terminate queue consumer
finally:
if fut is not None:
await fut
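
receive_file bridges the event loop and a writer thread with a SimpleQueue: chunks are queued without waiting, but every fifth chunk carries a future the producer awaits, so an upload cannot run arbitrarily far ahead of disk writes. The same backpressure idea in a standalone, runnable form:

import asyncio
from queue import SimpleQueue

async def produce(loop: asyncio.AbstractEventLoop, queue: SimpleQueue) -> None:
    for i in range(10):
        if (i + 1) % 5:
            queue.put_nowait((b"x" * 1024, None))  # fire and forget
            continue
        ack: asyncio.Future[None] = loop.create_future()
        queue.put_nowait((b"x" * 1024, ack))
        await ack  # wait until the consumer has caught up
    queue.put_nowait(None)  # terminate the consumer

def consume(loop: asyncio.AbstractEventLoop, queue: SimpleQueue) -> int:
    written = 0
    while (item := queue.get()) is not None:
        chunk, ack = item
        if ack is not None:
            loop.call_soon_threadsafe(ack.set_result, None)
        written += len(chunk)
    return written

async def main() -> None:
    loop = asyncio.get_running_loop()
    queue: SimpleQueue = SimpleQueue()
    writer = loop.run_in_executor(None, consume, loop, queue)
    await produce(loop, queue)
    print(await writer)  # 10240 bytes written

asyncio.run(main())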
|
@ -7,22 +7,31 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .config import ScheduleState
|
||||
from .const import DATA_MANAGER, LOGGER
|
||||
from .manager import ManagerStateEvent
|
||||
from .models import Folder
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
|
||||
"""Register websocket commands."""
|
||||
websocket_api.async_register_command(hass, backup_agents_info)
|
||||
|
||||
if with_hassio:
|
||||
websocket_api.async_register_command(hass, handle_backup_end)
|
||||
websocket_api.async_register_command(hass, handle_backup_start)
|
||||
return
|
||||
|
||||
websocket_api.async_register_command(hass, handle_details)
|
||||
websocket_api.async_register_command(hass, handle_info)
|
||||
websocket_api.async_register_command(hass, handle_create)
|
||||
websocket_api.async_register_command(hass, handle_remove)
|
||||
websocket_api.async_register_command(hass, handle_create_with_strategy_settings)
|
||||
websocket_api.async_register_command(hass, handle_delete)
|
||||
websocket_api.async_register_command(hass, handle_restore)
|
||||
websocket_api.async_register_command(hass, handle_subscribe_events)
|
||||
|
||||
websocket_api.async_register_command(hass, handle_config_info)
|
||||
websocket_api.async_register_command(hass, handle_config_update)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@ -35,12 +44,16 @@ async def handle_info(
|
||||
) -> None:
|
||||
"""List all stored backups."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
backups = await manager.async_get_backups()
|
||||
backups, agent_errors = await manager.async_get_backups()
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"agent_errors": {
|
||||
agent_id: str(err) for agent_id, err in agent_errors.items()
|
||||
},
|
||||
"backups": list(backups.values()),
|
||||
"backing_up": manager.backing_up,
|
||||
"last_attempted_strategy_backup": manager.config.data.last_attempted_strategy_backup,
|
||||
"last_completed_strategy_backup": manager.config.data.last_completed_strategy_backup,
|
||||
},
|
||||
)
|
||||
|
||||
@ -49,7 +62,7 @@ async def handle_info(
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/details",
|
||||
vol.Required("slug"): str,
|
||||
vol.Required("backup_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@ -58,11 +71,16 @@ async def handle_details(
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get backup details for a specific slug."""
|
||||
backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"])
|
||||
"""Get backup details for a specific backup."""
|
||||
backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup(
|
||||
msg["backup_id"]
|
||||
)
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"agent_errors": {
|
||||
agent_id: str(err) for agent_id, err in agent_errors.items()
|
||||
},
|
||||
"backup": backup,
|
||||
},
|
||||
)
|
||||
@ -71,26 +89,39 @@ async def handle_details(
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/remove",
|
||||
vol.Required("slug"): str,
|
||||
vol.Required("type"): "backup/delete",
|
||||
vol.Required("backup_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_remove(
|
||||
async def handle_delete(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Remove a backup."""
|
||||
await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"])
|
||||
connection.send_result(msg["id"])
|
||||
"""Delete a backup."""
|
||||
agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"])
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"agent_errors": {
|
||||
agent_id: str(err) for agent_id, err in agent_errors.items()
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/restore",
|
||||
vol.Required("slug"): str,
|
||||
vol.Required("backup_id"): str,
|
||||
vol.Required("agent_id"): str,
|
||||
vol.Optional("password"): str,
|
||||
vol.Optional("restore_addons"): [str],
|
||||
vol.Optional("restore_database", default=True): bool,
|
||||
vol.Optional("restore_folders"): [vol.Coerce(Folder)],
|
||||
vol.Optional("restore_homeassistant", default=True): bool,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@ -100,12 +131,32 @@ async def handle_restore(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Restore a backup."""
|
||||
await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"])
|
||||
await hass.data[DATA_MANAGER].async_restore_backup(
|
||||
msg["backup_id"],
|
||||
agent_id=msg["agent_id"],
|
||||
password=msg.get("password"),
|
||||
restore_addons=msg.get("restore_addons"),
|
||||
restore_database=msg["restore_database"],
|
||||
restore_folders=msg.get("restore_folders"),
|
||||
restore_homeassistant=msg["restore_homeassistant"],
|
||||
)
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/generate"})
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/generate",
|
||||
vol.Required("agent_ids"): [str],
|
||||
vol.Optional("include_addons"): [str],
|
||||
vol.Optional("include_all_addons", default=False): bool,
|
||||
vol.Optional("include_database", default=True): bool,
|
||||
vol.Optional("include_folders"): [vol.Coerce(Folder)],
|
||||
vol.Optional("include_homeassistant", default=True): bool,
|
||||
vol.Optional("name"): str,
|
||||
vol.Optional("password"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_create(
|
||||
hass: HomeAssistant,
|
||||
@ -113,7 +164,46 @@ async def handle_create(
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Generate a backup."""
|
||||
backup = await hass.data[DATA_MANAGER].async_create_backup()
|
||||
|
||||
backup = await hass.data[DATA_MANAGER].async_initiate_backup(
|
||||
agent_ids=msg["agent_ids"],
|
||||
include_addons=msg.get("include_addons"),
|
||||
include_all_addons=msg["include_all_addons"],
|
||||
include_database=msg["include_database"],
|
||||
include_folders=msg.get("include_folders"),
|
||||
include_homeassistant=msg["include_homeassistant"],
|
||||
name=msg.get("name"),
|
||||
password=msg.get("password"),
|
||||
)
|
||||
connection.send_result(msg["id"], backup)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/generate_with_strategy_settings",
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_create_with_strategy_settings(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Generate a backup with stored settings."""
|
||||
|
||||
config_data = hass.data[DATA_MANAGER].config.data
|
||||
backup = await hass.data[DATA_MANAGER].async_initiate_backup(
|
||||
agent_ids=config_data.create_backup.agent_ids,
|
||||
include_addons=config_data.create_backup.include_addons,
|
||||
include_all_addons=config_data.create_backup.include_all_addons,
|
||||
include_database=config_data.create_backup.include_database,
|
||||
include_folders=config_data.create_backup.include_folders,
|
||||
include_homeassistant=True, # always include HA
|
||||
name=config_data.create_backup.name,
|
||||
password=config_data.create_backup.password,
|
||||
with_strategy_settings=True,
|
||||
)
|
||||
connection.send_result(msg["id"], backup)


@ -127,7 +217,6 @@ async def handle_backup_start(
) -> None:
    """Backup start notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = True
    LOGGER.debug("Backup start notification")

    try:
@ -149,7 +238,6 @@ async def handle_backup_end(
) -> None:
    """Backup end notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = False
    LOGGER.debug("Backup end notification")

    try:
@ -159,3 +247,97 @@ async def handle_backup_end(
        return

    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"})
@websocket_api.async_response
async def backup_agents_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Return backup agents info."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents],
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"})
@websocket_api.async_response
async def handle_config_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Send the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "config": manager.config.data.to_dict(),
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("create_backup"): vol.Schema(
            {
                vol.Optional("agent_ids"): vol.All(list[str]),
                vol.Optional("include_addons"): vol.Any(list[str], None),
                vol.Optional("include_all_addons"): bool,
                vol.Optional("include_database"): bool,
                vol.Optional("include_folders"): vol.Any([vol.Coerce(Folder)], None),
                vol.Optional("name"): vol.Any(str, None),
                vol.Optional("password"): vol.Any(str, None),
            },
        ),
        vol.Optional("retention"): vol.Schema(
            {
                vol.Optional("copies"): vol.Any(int, None),
                vol.Optional("days"): vol.Any(int, None),
            },
        ),
        vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)),
    }
)
@websocket_api.async_response
async def handle_config_update(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Update the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    changes = dict(msg)
    changes.pop("id")
    changes.pop("type")
    await manager.config.update(**changes)
    connection.send_result(msg["id"])
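
A sketch of a matching backup/config/update request; every value is illustrative, and "daily" assumes ScheduleState accepts that literal:

# Hypothetical payload exercising the optional sub-schemas above.
example_config_update = {
    "id": 43,
    "type": "backup/config/update",
    "create_backup": {"agent_ids": ["backup.local"], "include_database": True},
    "retention": {"copies": 3, "days": None},
    "schedule": "daily",  # coerced via vol.Coerce(ScheduleState); assumed valid
}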


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    manager = hass.data[DATA_MANAGER]
    on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
    connection.send_result(msg["id"])

@ -40,7 +40,6 @@ class BAFAutoComfort(BAFEntity, ClimateEntity):
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = [HVACMode.OFF, HVACMode.FAN_ONLY]
    _attr_translation_key = "auto_comfort"
    _enable_turn_on_off_backwards_compatibility = False

    @callback
    def _async_update_attrs(self) -> None:

@ -46,7 +46,7 @@ class BAFFan(BAFEntity, FanEntity):
        | FanEntityFeature.TURN_OFF
        | FanEntityFeature.TURN_ON
    )
    _enable_turn_on_off_backwards_compatibility = False

    _attr_preset_modes = [PRESET_MODE_AUTO]
    _attr_speed_count = SPEED_COUNT
    _attr_name = None

@ -8,16 +8,12 @@ from aiobafi6 import Device, OffOnAuto

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP,
    ATTR_COLOR_TEMP_KELVIN,
    ColorMode,
    LightEntity,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.color import (
    color_temperature_kelvin_to_mired,
    color_temperature_mired_to_kelvin,
)

from . import BAFConfigEntry
from .entity import BAFEntity
@ -77,25 +73,17 @@ class BAFStandaloneLight(BAFLight):
    def __init__(self, device: Device) -> None:
        """Init a standalone light."""
        super().__init__(device)
        self._attr_min_mireds = color_temperature_kelvin_to_mired(
            device.light_warmest_color_temperature
        )
        self._attr_max_mireds = color_temperature_kelvin_to_mired(
            device.light_coolest_color_temperature
        )
        self._attr_max_color_temp_kelvin = device.light_warmest_color_temperature
        self._attr_min_color_temp_kelvin = device.light_coolest_color_temperature

    @callback
    def _async_update_attrs(self) -> None:
        """Update attrs from device."""
        super()._async_update_attrs()
        self._attr_color_temp = color_temperature_kelvin_to_mired(
            self._device.light_color_temperature
        )
        self._attr_color_temp_kelvin = self._device.light_color_temperature

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on the light."""
        if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:
            self._device.light_color_temperature = color_temperature_mired_to_kelvin(
                color_temp
            )
        if (color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
            self._device.light_color_temperature = color_temp
        await super().async_turn_on(**kwargs)
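
The mired and kelvin scales used before and after this migration are reciprocal (mireds = 1,000,000 / kelvin). A minimal self-contained sketch of the arithmetic behind the color_temperature_kelvin_to_mired / color_temperature_mired_to_kelvin helpers dropped above; the exact rounding in homeassistant.util.color may differ:

def kelvin_to_mired(kelvin: float) -> int:
    # 2700 K (warm) -> 370 mireds, 6500 K (cool) -> 154 mireds
    return round(1_000_000 / kelvin)


def mired_to_kelvin(mired: float) -> int:
    return round(1_000_000 / mired)


assert kelvin_to_mired(2700) == 370
assert kelvin_to_mired(6500) == 154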

@ -65,7 +65,6 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity):
    )
    _attr_translation_key = DOMAIN
    _attr_name = None
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, client: SpaClient) -> None:
        """Initialize the climate entity."""

@ -38,7 +38,7 @@ class BalboaPumpFanEntity(BalboaEntity, FanEntity):
        | FanEntityFeature.TURN_OFF
        | FanEntityFeature.TURN_ON
    )
    _enable_turn_on_off_backwards_compatibility = False

    _attr_translation_key = "pump"

    def __init__(self, control: SpaControl) -> None:

@ -8,6 +8,7 @@ from aiohttp.client_exceptions import (
    ClientConnectorError,
    ClientOSError,
    ServerTimeoutError,
    WSMessageTypeError,
)
from mozart_api.exceptions import ApiException
from mozart_api.mozart_client import MozartClient
@ -62,6 +63,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
        ServerTimeoutError,
        ApiException,
        TimeoutError,
        WSMessageTypeError,
    ) as error:
        await client.close_api_client()
        raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error

@ -210,3 +210,20 @@ BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"

CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"

# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
    "aux_a",
    "cd",
    "ph",
    "radio",
    "tp1",
    "tp2",
)
BEOLINK_JOIN_SOURCES = (
    *BEOLINK_JOIN_SOURCES_TO_UPPER,
    "beoradio",
    "deezer",
    "spotify",
    "tidal",
)
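
A quick sketch of how these constants drive source normalization in async_beolink_join further down: converter (NL/ML) sources are upper-cased before being sent to the device, while all other sources pass through unchanged:

BEOLINK_JOIN_SOURCES_TO_UPPER = ("aux_a", "cd", "ph", "radio", "tp1", "tp2")


def normalize_beolink_source(source_id: str) -> str:
    # Mirrors the branch in async_beolink_join below.
    if source_id in BEOLINK_JOIN_SOURCES_TO_UPPER:
        return source_id.upper()
    return source_id


assert normalize_beolink_source("cd") == "CD"
assert normalize_beolink_source("deezer") == "deezer"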

@ -6,6 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["mozart-api==4.1.1.116.3"],
  "requirements": ["mozart-api==4.1.1.116.4"],
  "zeroconf": ["_bangolufsen._tcp.local."]
}

@ -74,6 +74,8 @@ from .const import (
    BANG_OLUFSEN_REPEAT_FROM_HA,
    BANG_OLUFSEN_REPEAT_TO_HA,
    BANG_OLUFSEN_STATES,
    BEOLINK_JOIN_SOURCES,
    BEOLINK_JOIN_SOURCES_TO_UPPER,
    CONF_BEOLINK_JID,
    CONNECTION_STATUS,
    DOMAIN,
@ -135,7 +137,10 @@ async def async_setup_entry(

    platform.async_register_entity_service(
        name="beolink_join",
        schema={vol.Optional("beolink_jid"): jid_regex},
        schema={
            vol.Optional("beolink_jid"): jid_regex,
            vol.Optional("source_id"): vol.In(BEOLINK_JOIN_SOURCES),
        },
        func="async_beolink_join",
    )

@ -985,12 +990,23 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
        await self.async_beolink_leave()

    # Custom actions:
    async def async_beolink_join(self, beolink_jid: str | None = None) -> None:
    async def async_beolink_join(
        self, beolink_jid: str | None = None, source_id: str | None = None
    ) -> None:
        """Join a Beolink multi-room experience."""
        # Touch to join
        if beolink_jid is None:
            await self._client.join_latest_beolink_experience()
        else:
        # Join a peer
        elif beolink_jid and source_id is None:
            await self._client.join_beolink_peer(jid=beolink_jid)
        # Join a peer and select specific source
        elif beolink_jid and source_id:
            # Beolink Converter NL/ML sources need to be in upper case
            if source_id in BEOLINK_JOIN_SOURCES_TO_UPPER:
                source_id = source_id.upper()

            await self._client.join_beolink_peer(jid=beolink_jid, source=source_id)

    async def async_beolink_expand(
        self, beolink_jids: list[str] | None = None, all_discovered: bool = False
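
For illustration, the extended entity service could be invoked from an automation or script roughly like the sketch below; the entity id is hypothetical and the JID mirrors the example in services.yaml that follows:

# Hypothetical action call joining a Beolink peer on a converter source;
# "cd" is upper-cased to "CD" by async_beolink_join before it is sent.
await hass.services.async_call(
    "bang_olufsen",
    "beolink_join",
    {
        "entity_id": "media_player.beosound_balance",  # hypothetical
        "beolink_jid": "1111.2222222.33333333@products.bang-olufsen.com",
        "source_id": "cd",
    },
    blocking=True,
)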

@ -48,6 +48,23 @@ beolink_join:
      example: 1111.2222222.33333333@products.bang-olufsen.com
      selector:
        text:
    source_id:
      required: false
      example: tidal
      selector:
        select:
          translation_key: "source_ids"
          options:
            - beoradio
            - deezer
            - spotify
            - tidal
            - radio
            - tp1
            - tp2
            - cd
            - aux_a
            - ph

beolink_leave:
  target:

@ -29,6 +29,22 @@
      }
    }
  },
  "selector": {
    "source_ids": {
      "options": {
        "beoradio": "ASE Beoradio",
        "deezer": "ASE / Mozart Deezer",
        "spotify": "ASE / Mozart Spotify",
        "tidal": "Mozart Tidal",
        "aux_a": "Beolink Converter NL/ML AUX_A",
        "cd": "Beolink Converter NL/ML CD",
        "ph": "Beolink Converter NL/ML PH",
        "radio": "Beolink Converter NL/ML RADIO",
        "tp1": "Beolink Converter NL/ML TP1",
        "tp2": "Beolink Converter NL/ML TP2"
      }
    }
  },
  "services": {
    "beolink_allstandby": {
      "name": "Beolink all standby",
@ -61,6 +77,10 @@
        "beolink_jid": {
          "name": "Beolink JID",
          "description": "Manually specify Beolink JID to join."
        },
        "source_id": {
          "name": "Source",
          "description": "Specify which source to join; behavior varies between hardware platforms. Source names prefaced by a platform name can only be used when connecting to that platform. For example \"ASE Beoradio\" can only be used when joining an ASE device, while \"ASE / Mozart Deezer\" can be used with ASE or Mozart devices. A defined Beolink JID is required."
        }
      },
      "sections": {

@ -57,7 +57,6 @@ class BleBoxClimateEntity(BleBoxEntity[blebox_uniapi.climate.Climate], ClimateEn
        | ClimateEntityFeature.TURN_ON
    )
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _enable_turn_on_off_backwards_compatibility = False

    @property
    def hvac_modes(self):

@ -11,7 +11,7 @@ from blebox_uniapi.light import BleboxColorMode

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_EFFECT,
    ATTR_RGB_COLOR,
    ATTR_RGBW_COLOR,
@ -22,6 +22,7 @@ from homeassistant.components.light import (
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import color as color_util

from . import BleBoxConfigEntry
from .entity import BleBoxEntity
@ -58,8 +59,8 @@ COLOR_MODE_MAP = {
class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
    """Representation of BleBox lights."""

    _attr_max_mireds = 370  # 1,000,000 divided by 2700 Kelvin = 370 Mireds
    _attr_min_mireds = 154  # 1,000,000 divided by 6500 Kelvin = 154 Mireds
    _attr_min_color_temp_kelvin = 2700  # 370 Mireds
    _attr_max_color_temp_kelvin = 6500  # 154 Mireds

    def __init__(self, feature: blebox_uniapi.light.Light) -> None:
        """Initialize a BleBox light."""
@ -78,9 +79,9 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
        return self._feature.brightness

    @property
    def color_temp(self):
        """Return color temperature."""
        return self._feature.color_temp
    def color_temp_kelvin(self) -> int:
        """Return the color temperature value in Kelvin."""
        return color_util.color_temperature_mired_to_kelvin(self._feature.color_temp)

    @property
    def color_mode(self):
@ -136,7 +137,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
        rgbw = kwargs.get(ATTR_RGBW_COLOR)
        brightness = kwargs.get(ATTR_BRIGHTNESS)
        effect = kwargs.get(ATTR_EFFECT)
        color_temp = kwargs.get(ATTR_COLOR_TEMP)
        color_temp_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
        rgbww = kwargs.get(ATTR_RGBWW_COLOR)
        feature = self._feature
        value = feature.sensible_on_value
@ -144,9 +145,10 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):

        if rgbw is not None:
            value = list(rgbw)
        if color_temp is not None:
        if color_temp_kelvin is not None:
            value = feature.return_color_temp_with_brightness(
                int(color_temp), self.brightness
                int(color_util.color_temperature_kelvin_to_mired(color_temp_kelvin)),
                self.brightness,
            )

        if rgbww is not None:
@ -158,9 +160,12 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity):
            value = list(rgb)

        if brightness is not None:
            if self.color_mode == ATTR_COLOR_TEMP:
            if self.color_mode == ColorMode.COLOR_TEMP:
                value = feature.return_color_temp_with_brightness(
                    self.color_temp, brightness
                    color_util.color_temperature_kelvin_to_mired(
                        self.color_temp_kelvin
                    ),
                    brightness,
                )
            else:
                value = feature.apply_brightness(value, brightness)
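
A condensed walk-through of the conversion at the call sites above, since blebox_uniapi still works in mireds while the entity now exposes kelvin (values illustrative):

# HA hands the entity kelvin; the device library expects mireds.
color_temp_kelvin = 4000  # from ATTR_COLOR_TEMP_KELVIN
mireds = round(1_000_000 / color_temp_kelvin)  # == 250
# value = feature.return_color_temp_with_brightness(mireds, self.brightness)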

@ -5,7 +5,7 @@ from __future__ import annotations
import voluptuous as vol

from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DEVICE_ID, CONF_PIN
from homeassistant.const import CONF_PIN
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
@ -13,11 +13,6 @@ from homeassistant.helpers import config_validation as cv
from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN
from .coordinator import BlinkConfigEntry

SERVICE_UPDATE_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
    }
)
SERVICE_SEND_PIN_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_CONFIG_ENTRY_ID): vol.All(cv.ensure_list, [cv.string]),

@ -2,12 +2,10 @@

from __future__ import annotations

from dataclasses import dataclass
import logging

import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE_ID, CONF_ENTITY_ID, CONF_NAME, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
@ -18,7 +16,7 @@ from homeassistant.helpers import (
import homeassistant.helpers.config_validation as cv

from .const import ATTR_VIN, CONF_READ_ONLY, DOMAIN
from .coordinator import BMWDataUpdateCoordinator
from .coordinator import BMWConfigEntry, BMWDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

@ -49,19 +47,9 @@ PLATFORMS = [
SERVICE_UPDATE_STATE = "update_state"


type BMWConfigEntry = ConfigEntry[BMWData]


@dataclass
class BMWData:
    """Class to store BMW runtime data."""

    coordinator: BMWDataUpdateCoordinator


@callback
def _async_migrate_options_from_data_if_missing(
    hass: HomeAssistant, entry: ConfigEntry
    hass: HomeAssistant, entry: BMWConfigEntry
) -> None:
    data = dict(entry.data)
    options = dict(entry.options)
@ -127,7 +115,7 @@ async def _async_migrate_entries(
    return True


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool:
    """Set up BMW Connected Drive from a config entry."""

    _async_migrate_options_from_data_if_missing(hass, entry)
@ -137,11 +125,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # Set up one data coordinator per account/config entry
    coordinator = BMWDataUpdateCoordinator(
        hass,
        entry=entry,
        config_entry=entry,
    )
    await coordinator.async_config_entry_first_refresh()

    entry.runtime_data = BMWData(coordinator)
    entry.runtime_data = coordinator

    # Set up all platforms except notify
    await hass.config_entries.async_forward_entry_setups(
@ -175,7 +163,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: BMWConfigEntry) -> bool:
    """Unload a config entry."""

    return await hass.config_entries.async_unload_platforms(
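
The pattern behind the repeated runtime_data changes in the BMW platforms below, as a minimal sketch (the stub class stands in for the real coordinator defined in coordinator.py):

from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
    """Stub standing in for the real coordinator."""


# Before: a one-field dataclass wrapper stored in entry.runtime_data.
@dataclass
class BMWData:
    coordinator: BMWDataUpdateCoordinator


# After: the alias parametrizes ConfigEntry with the coordinator itself, so
# platforms read config_entry.runtime_data without the extra ".coordinator" hop.
type BMWConfigEntry = ConfigEntry[BMWDataUpdateCoordinator]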

@ -203,7 +203,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the BMW binary sensors from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities = [
        BMWBinarySensor(coordinator, vehicle, description, hass.config.units)

@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .entity import BMWBaseEntity

if TYPE_CHECKING:
@ -55,7 +55,6 @@ BUTTON_TYPES: tuple[BMWButtonEntityDescription, ...] = (
    BMWButtonEntityDescription(
        key="deactivate_air_conditioning",
        translation_key="deactivate_air_conditioning",
        name="Deactivate air conditioning",
        remote_function=lambda vehicle: vehicle.remote_services.trigger_remote_air_conditioning_stop(),
        is_available=lambda vehicle: vehicle.is_remote_climate_stop_enabled,
    ),
@ -73,7 +72,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the BMW buttons from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities: list[BMWButton] = []

@ -111,6 +110,10 @@ class BMWButton(BMWBaseEntity, ButtonEntity):
        try:
            await self.entity_description.remote_function(self.vehicle)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex

        self.coordinator.async_update_listeners()
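
The recurring exception change in this and the following BMW platforms wraps raw library errors in translatable ones. A minimal sketch of the shared pattern; the domain and key line up with the remote_service_error entry added to strings.json further down:

from homeassistant.exceptions import HomeAssistantError

BMW_DOMAIN = "bmw_connected_drive"


def raise_remote_service_error(ex: Exception) -> None:
    # Rendered to the user via strings.json:
    # "Error executing remote service on vehicle. {exception}"
    raise HomeAssistantError(
        translation_domain=BMW_DOMAIN,
        translation_key="remote_service_error",
        translation_placeholders={"exception": str(ex)},
    ) from ex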

@ -18,7 +18,6 @@ import voluptuous as vol
from homeassistant.config_entries import (
    SOURCE_REAUTH,
    SOURCE_RECONFIGURE,
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
@ -39,6 +38,7 @@ from .const import (
    CONF_READ_ONLY,
    CONF_REFRESH_TOKEN,
)
from .coordinator import BMWConfigEntry

DATA_SCHEMA = vol.Schema(
    {
@ -53,6 +53,12 @@ DATA_SCHEMA = vol.Schema(
    },
    extra=vol.REMOVE_EXTRA,
)
RECONFIGURE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    },
    extra=vol.REMOVE_EXTRA,
)
CAPTCHA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_CAPTCHA_TOKEN): str,
@ -111,9 +117,8 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
        unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}"
        await self.async_set_unique_id(unique_id)

        if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}:
            self._abort_if_unique_id_mismatch(reason="account_mismatch")
        else:
        # Unique ID cannot change for reauth/reconfigure
        if self.source not in {SOURCE_REAUTH, SOURCE_RECONFIGURE}:
            self._abort_if_unique_id_configured()

        # Store user input for later use
@ -166,19 +171,39 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):

        return self.async_show_form(step_id="user", data_schema=schema, errors=errors)

    async def async_step_change_password(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the change password step."""
        existing_data = (
            dict(self._existing_entry_data) if self._existing_entry_data else {}
        )

        if user_input is not None:
            return await self.async_step_user(existing_data | user_input)

        return self.async_show_form(
            step_id="change_password",
            data_schema=RECONFIGURE_SCHEMA,
            description_placeholders={
                CONF_USERNAME: existing_data[CONF_USERNAME],
                CONF_REGION: existing_data[CONF_REGION],
            },
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle configuration by re-auth."""
        self._existing_entry_data = entry_data
        return await self.async_step_user()
        return await self.async_step_change_password()

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a reconfiguration flow initialized by the user."""
        self._existing_entry_data = self._get_reconfigure_entry().data
        return await self.async_step_user()
        return await self.async_step_change_password()

    async def async_step_captcha(
        self, user_input: dict[str, Any] | None = None
@ -199,7 +224,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: ConfigEntry,
        config_entry: BMWConfigEntry,
    ) -> BMWOptionsFlow:
        """Return a MyBMW option flow."""
        return BMWOptionsFlow()

@ -22,39 +22,51 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.ssl import get_default_context

from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS
from .const import (
    CONF_GCID,
    CONF_READ_ONLY,
    CONF_REFRESH_TOKEN,
    DOMAIN as BMW_DOMAIN,
    SCAN_INTERVALS,
)

_LOGGER = logging.getLogger(__name__)


type BMWConfigEntry = ConfigEntry[BMWDataUpdateCoordinator]


class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
    """Class to manage fetching BMW data."""

    account: MyBMWAccount
    config_entry: BMWConfigEntry

    def __init__(self, hass: HomeAssistant, *, entry: ConfigEntry) -> None:
    def __init__(self, hass: HomeAssistant, *, config_entry: BMWConfigEntry) -> None:
        """Initialize account-wide BMW data updater."""
        self.account = MyBMWAccount(
            entry.data[CONF_USERNAME],
            entry.data[CONF_PASSWORD],
            get_region_from_name(entry.data[CONF_REGION]),
            config_entry.data[CONF_USERNAME],
            config_entry.data[CONF_PASSWORD],
            get_region_from_name(config_entry.data[CONF_REGION]),
            observer_position=GPSPosition(hass.config.latitude, hass.config.longitude),
            verify=get_default_context(),
        )
        self.read_only = entry.options[CONF_READ_ONLY]
        self._entry = entry
        self.read_only: bool = config_entry.options[CONF_READ_ONLY]

        if CONF_REFRESH_TOKEN in entry.data:
        if CONF_REFRESH_TOKEN in config_entry.data:
            self.account.set_refresh_token(
                refresh_token=entry.data[CONF_REFRESH_TOKEN],
                gcid=entry.data.get(CONF_GCID),
                refresh_token=config_entry.data[CONF_REFRESH_TOKEN],
                gcid=config_entry.data.get(CONF_GCID),
            )

        super().__init__(
            hass,
            _LOGGER,
            name=f"{DOMAIN}-{entry.data['username']}",
            update_interval=timedelta(seconds=SCAN_INTERVALS[entry.data[CONF_REGION]]),
            config_entry=config_entry,
            name=f"{BMW_DOMAIN}-{config_entry.data[CONF_USERNAME]}",
            update_interval=timedelta(
                seconds=SCAN_INTERVALS[config_entry.data[CONF_REGION]]
            ),
        )

        # Default to false on init so _async_update_data logic works
@ -69,18 +81,29 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
        except MyBMWCaptchaMissingError as err:
            # If a captcha is required (user/password login flow), always trigger the reauth flow
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
                translation_domain=BMW_DOMAIN,
                translation_key="missing_captcha",
            ) from err
        except MyBMWAuthError as err:
            # Allow one retry interval before raising AuthFailed to avoid flaky API issues
            if self.last_update_success:
                raise UpdateFailed(err) from err
                raise UpdateFailed(
                    translation_domain=BMW_DOMAIN,
                    translation_key="update_failed",
                    translation_placeholders={"exception": str(err)},
                ) from err
            # Clear refresh token and trigger reauth if previous update failed as well
            self._update_config_entry_refresh_token(None)
            raise ConfigEntryAuthFailed(err) from err
            raise ConfigEntryAuthFailed(
                translation_domain=BMW_DOMAIN,
                translation_key="invalid_auth",
            ) from err
        except (MyBMWAPIError, RequestError) as err:
            raise UpdateFailed(err) from err
            raise UpdateFailed(
                translation_domain=BMW_DOMAIN,
                translation_key="update_failed",
                translation_placeholders={"exception": str(err)},
            ) from err

        if self.account.refresh_token != old_refresh_token:
            self._update_config_entry_refresh_token(self.account.refresh_token)
@ -88,9 +111,9 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
    def _update_config_entry_refresh_token(self, refresh_token: str | None) -> None:
        """Update or delete the refresh_token in the Config Entry."""
        data = {
            **self._entry.data,
            **self.config_entry.data,
            CONF_REFRESH_TOKEN: refresh_token,
        }
        if not refresh_token:
            data.pop(CONF_REFRESH_TOKEN)
        self.hass.config_entries.async_update_entry(self._entry, data=data)
        self.hass.config_entries.async_update_entry(self.config_entry, data=data)
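
A condensed sketch of the auth-retry policy above: the first MyBMWAuthError after a healthy update surfaces as a retryable UpdateFailed, while a second consecutive failure clears the stored refresh token and escalates to reauth:

def classify_auth_error(last_update_success: bool) -> str:
    # Mirrors the MyBMWAuthError branch in _async_update_data above.
    if last_update_success:
        return "UpdateFailed"  # transient; retry next interval
    return "ConfigEntryAuthFailed"  # two in a row; trigger reauth flow


assert classify_auth_error(True) == "UpdateFailed"
assert classify_auth_error(False) == "ConfigEntryAuthFailed"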

@ -27,7 +27,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW tracker from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data
    entities: list[BMWDeviceTracker] = []

    for vehicle in coordinator.account.vehicles:
@ -49,7 +49,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity):

    _attr_force_update = False
    _attr_translation_key = "car"
    _attr_icon = "mdi:car"
    _attr_name = None

    def __init__(
        self,
@ -58,9 +58,7 @@ class BMWDeviceTracker(BMWBaseEntity, TrackerEntity):
    ) -> None:
        """Initialize the Tracker."""
        super().__init__(coordinator, vehicle)

        self._attr_unique_id = vehicle.vin
        self._attr_name = None

    @property
    def extra_state_attributes(self) -> dict[str, Any]:

@ -51,7 +51,7 @@ async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: BMWConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    coordinator.account.config.log_responses = True
    await coordinator.account.get_vehicles(force_init=True)
@ -77,7 +77,7 @@ async def async_get_device_diagnostics(
    hass: HomeAssistant, config_entry: BMWConfigEntry, device: DeviceEntry
) -> dict[str, Any]:
    """Return diagnostics for a device."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    coordinator.account.config.log_responses = True
    await coordinator.account.get_vehicles(force_init=True)

@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -31,7 +31,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW lock from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    if not coordinator.read_only:
        async_add_entities(
@ -70,7 +70,11 @@ class BMWLock(BMWBaseEntity, LockEntity):
            # Set the state to unknown if the command fails
            self._attr_is_locked = None
            self.async_write_ha_state()
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex
        finally:
            # Always update the listeners to get the latest state
            self.coordinator.async_update_listeners()
@ -90,7 +94,11 @@ class BMWLock(BMWBaseEntity, LockEntity):
            # Set the state to unknown if the command fails
            self._attr_is_locked = None
            self.async_write_ha_state()
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex
        finally:
            # Always update the listeners to get the latest state
            self.coordinator.async_update_listeners()

@ -20,7 +20,7 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN, BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry

PARALLEL_UPDATES = 1

@ -53,7 +53,7 @@ def get_service(
    targets = {}
    if (
        config_entry
        and (coordinator := config_entry.runtime_data.coordinator)
        and (coordinator := config_entry.runtime_data)
        and not coordinator.read_only
    ):
        targets.update({v.name: v for v in coordinator.account.vehicles})
@ -92,7 +92,7 @@ class BMWNotificationService(BaseNotificationService):

        except (vol.Invalid, TypeError, ValueError) as ex:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_domain=BMW_DOMAIN,
                translation_key="invalid_poi",
                translation_placeholders={
                    "poi_exception": str(ex),
@ -106,4 +106,8 @@ class BMWNotificationService(BaseNotificationService):
        try:
            await vehicle.remote_services.trigger_send_poi(poi)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex

@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -61,7 +61,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW number from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities: list[BMWNumber] = []

@ -109,6 +109,10 @@ class BMWNumber(BMWBaseEntity, NumberEntity):
        try:
            await self.entity_description.remote_service(self.vehicle, value)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex

        self.coordinator.async_update_listeners()

@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -68,7 +68,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW lock from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities: list[BMWSelect] = []

@ -123,6 +123,10 @@ class BMWSelect(BMWBaseEntity, SelectEntity):
        try:
            await self.entity_description.remote_service(self.vehicle, option)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex
            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex

        self.coordinator.async_update_listeners()

@ -193,7 +193,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW sensors from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities = [
        BMWSensor(coordinator, vehicle, description)

@ -2,10 +2,16 @@
  "config": {
    "step": {
      "user": {
        "description": "Connect to your MyBMW/MINI Connected account to retrieve vehicle data.",
        "data": {
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]",
          "region": "ConnectedDrive Region"
        },
        "data_description": {
          "username": "The email address of your MyBMW/MINI Connected account.",
          "password": "The password of your MyBMW/MINI Connected account.",
          "region": "The region of your MyBMW/MINI Connected account."
        }
      },
      "captcha": {
@ -17,6 +23,15 @@
        "data_description": {
          "captcha_token": "One-time token retrieved from the captcha challenge."
        }
      },
      "change_password": {
        "description": "Update your MyBMW/MINI Connected password for account `{username}` in region `{region}`.",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::bmw_connected_drive::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
@ -27,15 +42,17 @@
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
      "account_mismatch": "Username and region are not allowed to change"
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    }
  },
  "options": {
    "step": {
      "account_options": {
        "data": {
          "read_only": "Read-only (only sensors and notify, no execution of services, no lock)"
          "read_only": "Read-only mode"
        },
        "data_description": {
          "read_only": "Only retrieve values and send POI data, but don't offer any services that can change the vehicle state."
        }
      }
    }
  }
@ -77,6 +94,9 @@
    "activate_air_conditioning": {
      "name": "Activate air conditioning"
    },
    "deactivate_air_conditioning": {
      "name": "Deactivate air conditioning"
    },
    "find_vehicle": {
      "name": "Find vehicle"
    }
@ -214,6 +234,15 @@
    },
    "missing_captcha": {
      "message": "Login requires captcha validation"
    },
    "invalid_auth": {
      "message": "[%key:common::config_flow::error::invalid_auth%]"
    },
    "remote_service_error": {
      "message": "Error executing remote service on vehicle. {exception}"
    },
    "update_failed": {
      "message": "Error updating vehicle data. {exception}"
    }
  }
}

@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BMWConfigEntry
from . import DOMAIN as BMW_DOMAIN, BMWConfigEntry
from .coordinator import BMWDataUpdateCoordinator
from .entity import BMWBaseEntity

@ -69,7 +69,7 @@ async def async_setup_entry(
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the MyBMW switch from config entry."""
    coordinator = config_entry.runtime_data.coordinator
    coordinator = config_entry.runtime_data

    entities: list[BMWSwitch] = []

@ -111,8 +111,11 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity):
        try:
            await self.entity_description.remote_service_on(self.vehicle)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex

            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex
        self.coordinator.async_update_listeners()

    async def async_turn_off(self, **kwargs: Any) -> None:
@ -120,6 +123,9 @@ class BMWSwitch(BMWBaseEntity, SwitchEntity):
        try:
            await self.entity_description.remote_service_off(self.vehicle)
        except MyBMWAPIError as ex:
            raise HomeAssistantError(ex) from ex

            raise HomeAssistantError(
                translation_domain=BMW_DOMAIN,
                translation_key="remote_service_error",
                translation_placeholders={"exception": str(ex)},
            ) from ex
        self.coordinator.async_update_listeners()

@ -85,6 +85,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN):

        if user_input is not None:
            if not (errors := await self.validate_input(user_input)):
                self._abort_if_unique_id_mismatch()
                return self.async_update_reload_and_abort(
                    self.reauth_entry, data=user_input
                )

72
homeassistant/components/bring/quality_scale.yaml
Normal file
@ -0,0 +1,72 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Only entity services
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: todo
  dependency-transparency: done
  docs-actions: done
  docs-high-level-description: todo
  docs-installation-instructions: todo
  docs-removal-instructions: todo
  entity-event-setup:
    status: exempt
    comment: The integration registers no events
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: handled by coordinator
  parallel-updates: done
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: Integration is a service and has no devices.
  discovery:
    status: exempt
    comment: Integration is a service and has no devices.
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      no repairs
  stale-devices: todo
  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo

@ -24,6 +24,8 @@ from .coordinator import BringData, BringDataUpdateCoordinator
from .entity import BringBaseEntity
from .util import list_language, sum_attributes

PARALLEL_UPDATES = 0


@dataclass(kw_only=True, frozen=True)
class BringSensorEntityDescription(SensorEntityDescription):

@ -26,7 +26,8 @@
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "unique_id_mismatch": "The login details correspond to a different account. Please re-authenticate to the previously configured account."
    }
  },
  "entity": {

@ -34,6 +34,8 @@ from .const import (
from .coordinator import BringData, BringDataUpdateCoordinator
from .entity import BringBaseEntity

PARALLEL_UPDATES = 0


async def async_setup_entry(
    hass: HomeAssistant,

@ -52,7 +52,6 @@ class BroadlinkThermostat(BroadlinkEntity, ClimateEntity):
    )
    _attr_target_temperature_step = PRECISION_HALVES
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, device: BroadlinkDevice) -> None:
        """Initialize the climate entity."""

@ -77,7 +77,6 @@ class BryantEvolutionClimate(ClimateEntity):
        HVACMode.OFF,
    ]
    _attr_fan_modes = ["auto", "low", "med", "high"]
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(
        self,

@ -65,7 +65,6 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):

    _attr_preset_modes = PRESET_MODES
    _attr_hvac_modes = HVAC_MODES
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(
        self,

@ -7,12 +7,18 @@ from aiostreammagic import StreamMagicClient
import voluptuous as vol

from homeassistant.components import zeroconf
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS

DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str})


class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN):
    """Cambridge Audio configuration flow."""
@ -64,6 +70,17 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN):
            },
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reconfiguration of the integration."""
        if not user_input:
            return self.async_show_form(
                step_id="reconfigure",
                data_schema=DATA_SCHEMA,
            )
        return await self.async_step_user(user_input)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
@ -82,6 +99,12 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN):
            await self.async_set_unique_id(
                client.info.unit_id, raise_on_progress=False
            )
            if self.source == SOURCE_RECONFIGURE:
                self._abort_if_unique_id_mismatch(reason="wrong_device")
                return self.async_update_reload_and_abort(
                    self._get_reconfigure_entry(),
                    data_updates={CONF_HOST: user_input[CONF_HOST]},
                )
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title=client.info.name,
@ -91,6 +114,6 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN):
        await client.disconnect()
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
            data_schema=DATA_SCHEMA,
            errors=errors,
        )
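
A condensed sketch of the reconfigure guard added above: after connecting, the flow compares the device's reported unit_id with the entry's stored unique id and aborts with wrong_device when the user pointed the entry at a different physical unit (names illustrative):

def check_reconfigure(stored_unique_id: str, discovered_unit_id: str) -> str:
    if stored_unique_id != discovered_unit_id:
        return "abort: wrong_device"
    return "update host and reload entry"


assert check_reconfigure("unit-A", "unit-B") == "abort: wrong_device"
assert check_reconfigure("unit-A", "unit-A") == "update host and reload entry"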

@ -7,6 +7,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aiostreammagic"],
  "quality_scale": "platinum",
  "requirements": ["aiostreammagic==2.10.0"],
  "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
}

Some files were not shown because too many files have changed in this diff.