Mirror of https://github.com/home-assistant/core.git (synced 2025-12-25 09:18:27 +00:00)

Compare commits: 1 commit (climate-to... vs dev_debug_...)

| Author | SHA1 | Date |
|---|---|---|
|  | d5f7f4339f |  |

@@ -6,7 +6,6 @@ core: &core
- homeassistant/helpers/**
- homeassistant/package_constraints.txt
- homeassistant/util/**
- mypy.ini
- pyproject.toml
- requirements.txt
- setup.cfg
@@ -132,7 +131,6 @@ tests: &tests
- tests/components/conftest.py
- tests/components/diagnostics/**
- tests/components/history/**
- tests/components/light/common.py
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/repairs/**

.github/workflows/builder.yml (vendored, 14 changed lines)
@@ -69,7 +69,7 @@ jobs:
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

- name: Upload translations
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: translations
path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v7
uses: dawidd6/action-download-artifact@v6
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v7
uses: dawidd6/action-download-artifact@v6
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package
@@ -509,7 +509,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build Docker image
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -517,12 +517,12 @@ jobs:
tags: ${{ env.HASSFEST_IMAGE_TAG }}

- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace
run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components

- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}

.github/workflows/ci.yaml (vendored, 134 changed lines)
@@ -5,7 +5,7 @@ run-name: "${{ github.event_name == 'workflow_dispatch' && format('CI: {0}', git
on:
push:
branches:
- dev
- dev_debug_translation_issue
- rc
- master
pull_request: ~
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 11
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2025.2"
HA_SHORT_VERSION: "2024.12"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
# 10.3 is the oldest supported version
@@ -198,6 +198,8 @@ jobs:
skip_coverage="true"
fi

test_groups="[4]"

# Output & sent to GitHub Actions
echo "mariadb_groups: ${mariadb_groups}"
echo "mariadb_groups=${mariadb_groups}" >> $GITHUB_OUTPUT
@@ -240,7 +242,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.0
uses: actions/cache@v4.1.2
with:
path: venv
key: >-
@@ -256,7 +258,7 @@ jobs:
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.0
uses: actions/cache@v4.1.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -286,7 +288,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -295,7 +297,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -326,7 +328,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -335,7 +337,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -366,7 +368,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -375,7 +377,7 @@ jobs:
needs.info.outputs.pre-commit_cache_key }}
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
@@ -482,15 +484,16 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.0
uses: actions/cache@v4.1.2
with:
path: venv
lookup-only: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache@v4.2.0
uses: actions/cache@v4.1.2
with:
path: ${{ env.UV_CACHE_DIR }}
key: >-
@@ -530,26 +533,6 @@ jobs:
python -m script.gen_requirements_all ci
uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
uv pip install -e . --config-settings editable_mode=compat
- name: Dump pip freeze
run: |
python -m venv venv
. venv/bin/activate
python --version
uv pip freeze >> pip_freeze.txt
- name: Upload pip_freeze artifact
uses: actions/upload-artifact@v4.5.0
with:
name: pip-freeze-${{ matrix.python-version }}
path: pip_freeze.txt
overwrite: true
- name: Remove pip_freeze
run: rm pip_freeze.txt
- name: Remove generated requirements_all
if: steps.cache-venv.outputs.cache-hit != 'true'
run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt
- name: Check dirty
run: |
./script/check_dirty

hassfest:
name: Check hassfest
@@ -578,7 +561,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -611,7 +594,7 @@ jobs:
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -649,7 +632,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -661,7 +644,7 @@ jobs:
. venv/bin/activate
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
- name: Upload licenses
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
path: licenses-${{ matrix.python-version }}.json
@@ -677,6 +660,7 @@ jobs:
if: |
github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& 'true' == 'false'
|| github.event.inputs.pylint-only == 'true'
needs:
- info
@@ -692,7 +676,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -723,6 +707,7 @@ jobs:
if: |
(github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& 'true' == 'false'
|| github.event.inputs.pylint-only == 'true')
&& (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true')
needs:
@@ -739,7 +724,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -791,7 +776,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -799,7 +784,7 @@ jobs:
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Restore mypy cache
uses: actions/cache@v4.2.0
uses: actions/cache@v4.1.2
with:
path: .mypy_cache
key: >-
@@ -838,46 +823,12 @@ jobs:
needs:
- info
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- mypy
name: Split tests for full run
steps:
- name: Install additional OS dependencies
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update
sudo apt-get -y install \
bluez \
ffmpeg \
libturbojpeg \
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
with:
path: venv
fail-on-cache-miss: true
key: >-
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run split_tests.py
run: |
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: pytest_buckets
path: pytest_buckets.txt
@@ -929,7 +880,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -966,11 +917,10 @@ jobs:

echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
python3 -b -X dev -m pytest \
-qq \
-x \
--timeout=9 \
--durations=10 \
--numprocesses auto \
--snapshot-details \
--dist=loadfile \
${cov_params[@]} \
-o console_output_style=count \
@@ -979,14 +929,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@@ -1014,6 +964,7 @@ jobs:
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.mariadb_groups != '[]'
&& 'true' == 'false'
needs:
- info
- base
@@ -1050,7 +1001,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -1106,7 +1057,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@@ -1114,7 +1065,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.mariadb }}
@@ -1141,6 +1092,7 @@ jobs:
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
&& needs.info.outputs.postgresql_groups != '[]'
&& 'true' == 'false'
needs:
- info
- base
@@ -1179,7 +1131,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -1236,7 +1188,7 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@@ -1244,7 +1196,7 @@ jobs:
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: coverage-${{ matrix.python-version }}-${{
steps.pytest-partial.outputs.postgresql }}
@@ -1273,7 +1225,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'true'
uses: codecov/codecov-action@v5.1.2
uses: codecov/codecov-action@v5.0.7
with:
fail_ci_if_error: true
flags: full-suite
@@ -1325,7 +1277,7 @@ jobs:
check-latest: true
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.2.0
uses: actions/cache/restore@v4.1.2
with:
path: venv
fail-on-cache-miss: true
@@ -1378,14 +1330,14 @@ jobs:
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
path: pytest-*.txt
overwrite: true
- name: Upload coverage artifact
if: needs.info.outputs.skip_coverage != 'true'
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
path: coverage.xml
@@ -1411,7 +1363,7 @@ jobs:
pattern: coverage-*
- name: Upload coverage to Codecov
if: needs.info.outputs.test_full_suite == 'false'
uses: codecov/codecov-action@v5.1.2
uses: codecov/codecov-action@v5.0.7
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/codeql.yml (vendored, 4 changed lines)
@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.28.0
uses: github/codeql-action/init@v3.27.5
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.28.0
uses: github/codeql-action/analyze@v3.27.5
with:
category: "/language:python"

.github/workflows/wheels.yml (vendored, 64 changed lines)
@@ -76,37 +76,18 @@ jobs:

# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"

# Add additional pip wheel build constraints
echo "PIP_CONSTRAINT=build_constraints.txt"
) > .env_file

- name: Write pip wheel build constraints
run: |
(
# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
# this caused the numpy builds to fail
# https://github.com/scikit-build/ninja-python-distributions/issues/274
echo "ninja==1.11.1.1"
) > build_constraints.txt

- name: Upload env_file
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: env_file
path: ./.env_file
include-hidden-files: true
overwrite: true

- name: Upload build_constraints
uses: actions/upload-artifact@v4.5.0
with:
name: build_constraints
path: ./build_constraints.txt
overwrite: true

- name: Upload requirements_diff
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -118,7 +99,7 @@ jobs:
python -m script.gen_requirements_all ci

- name: Upload requirements_all_wheels
uses: actions/upload-artifact@v4.5.0
uses: actions/upload-artifact@v4.4.3
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -142,11 +123,6 @@ jobs:
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
with:
@@ -167,7 +143,7 @@ jobs:
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
skip-binary: aiohttp;multidict;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements.txt"
@@ -191,11 +167,6 @@ jobs:
with:
name: env_file

- name: Download build_constraints
uses: actions/download-artifact@v4.1.8
with:
name: build_constraints

- name: Download requirements_diff
uses: actions/download-artifact@v4.1.8
with:
@@ -226,6 +197,33 @@ jobs:

split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# pydantic: https://github.com/pydantic/pydantic/issues/7689

touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
pip: "'cython<3'"

- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.11.0
with:

.gitignore (vendored, 2 changed lines)
@@ -136,4 +136,4 @@ tmp_cache
.ropeproject

# Will be created from script/split_tests.py
pytest_buckets.txt
# pytest_buckets.txt

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.3
rev: v0.8.0
hooks:
- id: ruff
args:
@@ -12,7 +12,7 @@ repos:
hooks:
- id: codespell
args:
- --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
exclude_types: [csv, json, html]

@@ -41,7 +41,6 @@ homeassistant.util.unit_system
# --- Add components below this line ---
homeassistant.components
homeassistant.components.abode.*
homeassistant.components.acaia.*
homeassistant.components.accuweather.*
homeassistant.components.acer_projector.*
homeassistant.components.acmeda.*
@@ -137,7 +136,6 @@ homeassistant.components.co2signal.*
homeassistant.components.command_line.*
homeassistant.components.config.*
homeassistant.components.configurator.*
homeassistant.components.cookidoo.*
homeassistant.components.counter.*
homeassistant.components.cover.*
homeassistant.components.cpuspeed.*
@@ -170,7 +168,6 @@ homeassistant.components.easyenergy.*
homeassistant.components.ecovacs.*
homeassistant.components.ecowitt.*
homeassistant.components.efergy.*
homeassistant.components.eheimdigital.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elevenlabs.*
@@ -271,7 +268,6 @@ homeassistant.components.ios.*
homeassistant.components.iotty.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.iron_os.*
homeassistant.components.islamic_prayer_times.*
homeassistant.components.isy994.*
homeassistant.components.jellyfin.*
@@ -311,7 +307,6 @@ homeassistant.components.manual.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*
homeassistant.components.mealie.*
homeassistant.components.media_extractor.*
homeassistant.components.media_player.*
homeassistant.components.media_source.*
@@ -362,16 +357,13 @@ homeassistant.components.openuv.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.overseerr.*
homeassistant.components.p1_monitor.*
homeassistant.components.panel_custom.*
homeassistant.components.peblar.*
homeassistant.components.peco.*
homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.powerfox.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
homeassistant.components.prometheus.*
@@ -408,13 +400,11 @@ homeassistant.components.romy.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.rtsp_to_webrtc.*
homeassistant.components.russound_rio.*
homeassistant.components.ruuvi_gateway.*
homeassistant.components.ruuvitag_ble.*
homeassistant.components.samsungtv.*
homeassistant.components.scene.*
homeassistant.components.schedule.*
homeassistant.components.schlage.*
homeassistant.components.scrape.*
homeassistant.components.script.*
homeassistant.components.search.*
@@ -447,6 +437,7 @@ homeassistant.components.ssdp.*
homeassistant.components.starlink.*
homeassistant.components.statistics.*
homeassistant.components.steamist.*
homeassistant.components.stookalert.*
homeassistant.components.stookwijzer.*
homeassistant.components.stream.*
homeassistant.components.streamlabswater.*

.vscode/tasks.json (vendored, 42 changed lines)
@@ -16,7 +16,7 @@
{
"label": "Pytest",
"type": "shell",
"command": "${command:python.interpreterPath} -m pytest --timeout=10 tests",
"command": "python3 -m pytest --timeout=10 tests",
"dependsOn": ["Install all Test Requirements"],
"group": {
"kind": "test",
@@ -31,7 +31,7 @@
{
"label": "Pytest (changed tests only)",
"type": "shell",
"command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked",
"command": "python3 -m pytest --timeout=10 --picked",
"group": {
"kind": "test",
"isDefault": true
@@ -56,20 +56,6 @@
},
"problemMatcher": []
},
{
"label": "Pre-commit",
"type": "shell",
"command": "pre-commit run --show-diff-on-failure",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Pylint",
"type": "shell",
@@ -89,23 +75,7 @@
"label": "Code Coverage",
"detail": "Generate code coverage report for a given integration.",
"type": "shell",
"command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
"dependsOn": ["Compile English translations"],
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Update syrupy snapshots",
"detail": "Update syrupy snapshots for a given integration.",
"type": "shell",
"command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update",
"command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
"dependsOn": ["Compile English translations"],
"group": {
"kind": "test",
@@ -163,7 +133,7 @@
"label": "Compile English translations",
"detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.",
"type": "shell",
"command": "${command:python.interpreterPath} -m script.translations develop --all",
"command": "python3 -m script.translations develop --all",
"group": {
"kind": "build",
"isDefault": true
@@ -173,7 +143,7 @@
"label": "Run scaffold",
"detail": "Add new functionality to a integration using a scaffold.",
"type": "shell",
"command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
"command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
"group": {
"kind": "build",
"isDefault": true
@@ -183,7 +153,7 @@
"label": "Create new integration",
"detail": "Use the scaffold to create a new integration.",
"type": "shell",
"command": "${command:python.interpreterPath} -m script.scaffold integration",
"command": "python3 -m script.scaffold integration",
"group": {
"kind": "build",
"isDefault": true

CODEOWNERS (55 changed lines)
@@ -284,8 +284,6 @@ build.json @home-assistant/supervisor
/tests/components/control4/ @lawtancool
/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
/tests/components/conversation/ @home-assistant/core @synesthesiam
/homeassistant/components/cookidoo/ @miaucl
/tests/components/cookidoo/ @miaucl
/homeassistant/components/coolmaster/ @OnFreund
/tests/components/coolmaster/ @OnFreund
/homeassistant/components/counter/ @fabaff
@@ -387,8 +385,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/efergy/ @tkdrob
/tests/components/efergy/ @tkdrob
/homeassistant/components/egardia/ @jeroenterheerdt
/homeassistant/components/eheimdigital/ @autinerd
/tests/components/eheimdigital/ @autinerd
/homeassistant/components/electrasmart/ @jafar-atili
/tests/components/electrasmart/ @jafar-atili
/homeassistant/components/electric_kiwi/ @mikey0000
@@ -578,8 +574,8 @@ build.json @home-assistant/supervisor
/tests/components/google_tasks/ @allenporter
/homeassistant/components/google_travel_time/ @eifinger
/tests/components/google_travel_time/ @eifinger
/homeassistant/components/govee_ble/ @bdraco
/tests/components/govee_ble/ @bdraco
/homeassistant/components/govee_ble/ @bdraco @PierreAronnax
/tests/components/govee_ble/ @bdraco @PierreAronnax
/homeassistant/components/govee_light_local/ @Galorhallen
/tests/components/govee_light_local/ @Galorhallen
/homeassistant/components/gpsd/ @fabaff @jrieger
@@ -731,8 +727,8 @@ build.json @home-assistant/supervisor
/tests/components/ios/ @robbiet480
/homeassistant/components/iotawatt/ @gtdiehl @jyavenard
/tests/components/iotawatt/ @gtdiehl @jyavenard
/homeassistant/components/iotty/ @shapournemati-iotty
/tests/components/iotty/ @shapournemati-iotty
/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
/tests/components/iotty/ @pburgio @shapournemati-iotty
/homeassistant/components/iperf3/ @rohankapoorcom
/homeassistant/components/ipma/ @dgomes
/tests/components/ipma/ @dgomes
@@ -757,8 +753,6 @@ build.json @home-assistant/supervisor
/tests/components/ista_ecotrend/ @tr4nt0r
/homeassistant/components/isy994/ @bdraco @shbatm
/tests/components/isy994/ @bdraco @shbatm
/homeassistant/components/ituran/ @shmuelzon
/tests/components/ituran/ @shmuelzon
/homeassistant/components/izone/ @Swamp-Ig
/tests/components/izone/ @Swamp-Ig
/homeassistant/components/jellyfin/ @j-stienstra @ctalkington
@@ -1010,8 +1004,6 @@ build.json @home-assistant/supervisor
/tests/components/nice_go/ @IceBotYT
/homeassistant/components/nightscout/ @marciogranzotto
/tests/components/nightscout/ @marciogranzotto
/homeassistant/components/niko_home_control/ @VandeurenGlenn
/tests/components/niko_home_control/ @VandeurenGlenn
/homeassistant/components/nilu/ @hfurubotten
/homeassistant/components/nina/ @DeerMaximum
/tests/components/nina/ @DeerMaximum
@@ -1053,8 +1045,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/octoprint/ @rfleming71
/tests/components/octoprint/ @rfleming71
/homeassistant/components/ohmconnect/ @robbiet480
/homeassistant/components/ohme/ @dan-r
/tests/components/ohme/ @dan-r
/homeassistant/components/ollama/ @synesthesiam
/tests/components/ollama/ @synesthesiam
/homeassistant/components/ombi/ @larssont
@@ -1066,8 +1056,8 @@ build.json @home-assistant/supervisor
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
/tests/components/onkyo/ @arturpragacz @eclair4151
/homeassistant/components/onkyo/ @arturpragacz
/tests/components/onkyo/ @arturpragacz
/homeassistant/components/onvif/ @hunterjm
/tests/components/onvif/ @hunterjm
/homeassistant/components/open_meteo/ @frenck
@@ -1103,10 +1093,8 @@ build.json @home-assistant/supervisor
/tests/components/otbr/ @home-assistant/core
/homeassistant/components/ourgroceries/ @OnFreund
/tests/components/ourgroceries/ @OnFreund
/homeassistant/components/overkiz/ @imicknl
/tests/components/overkiz/ @imicknl
/homeassistant/components/overseerr/ @joostlek
/tests/components/overseerr/ @joostlek
/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
/homeassistant/components/ovo_energy/ @timmo001
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1115,8 +1103,6 @@ build.json @home-assistant/supervisor
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/peblar/ @frenck
/tests/components/peblar/ @frenck
/homeassistant/components/peco/ @IceBotYT
/tests/components/peco/ @IceBotYT
/homeassistant/components/pegel_online/ @mib1185
@@ -1137,16 +1123,14 @@ build.json @home-assistant/supervisor
/tests/components/plaato/ @JohNan
/homeassistant/components/plex/ @jjlawren
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/poolsense/ @haemishkyd
/tests/components/poolsense/ @haemishkyd
/homeassistant/components/powerfox/ @klaasnicolaas
/tests/components/powerfox/ @klaasnicolaas
/homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
/tests/components/powerwall/ @bdraco @jrester @daniel-simpson
/homeassistant/components/private_ble_device/ @Jc2k
@@ -1369,8 +1353,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/sleepiq/ @mfugate1 @kbickar
/tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73
/homeassistant/components/slide_local/ @dontinelli
/tests/components/slide_local/ @dontinelli
/homeassistant/components/slimproto/ @marcelveldt
/tests/components/slimproto/ @marcelveldt
/homeassistant/components/sma/ @kellerza @rklomp
@@ -1429,13 +1411,15 @@ build.json @home-assistant/supervisor
/tests/components/starline/ @anonym-tsk
/homeassistant/components/starlink/ @boswelja
/tests/components/starlink/ @boswelja
/homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST
/tests/components/statistics/ @ThomDietrich @gjohansson-ST
/homeassistant/components/statistics/ @ThomDietrich
/tests/components/statistics/ @ThomDietrich
/homeassistant/components/steam_online/ @tkdrob
/tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco
/tests/components/steamist/ @bdraco
/homeassistant/components/stiebel_eltron/ @fucm
/homeassistant/components/stookalert/ @fwestenberg @frenck
/tests/components/stookalert/ @fwestenberg @frenck
/homeassistant/components/stookwijzer/ @fwestenberg
/tests/components/stookwijzer/ @fwestenberg
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@@ -1480,8 +1464,8 @@ build.json @home-assistant/supervisor
/tests/components/system_bridge/ @timmo001
/homeassistant/components/systemmonitor/ @gjohansson-ST
/tests/components/systemmonitor/ @gjohansson-ST
/homeassistant/components/tado/ @erwindouna
/tests/components/tado/ @erwindouna
/homeassistant/components/tado/ @chiefdragon @erwindouna
/tests/components/tado/ @chiefdragon @erwindouna
/homeassistant/components/tag/ @balloob @dmulcahey
/tests/components/tag/ @balloob @dmulcahey
/homeassistant/components/tailscale/ @frenck
@@ -1575,8 +1559,8 @@ build.json @home-assistant/supervisor
/tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver
/tests/components/tuya/ @Tuya @zlinoliver
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
/tests/components/tuya/ @Tuya @zlinoliver @frenck
/homeassistant/components/twentemilieu/ @frenck
/tests/components/twentemilieu/ @frenck
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
@@ -1658,8 +1642,6 @@ build.json @home-assistant/supervisor
/tests/components/waqi/ @joostlek
/homeassistant/components/water_heater/ @home-assistant/core
/tests/components/water_heater/ @home-assistant/core
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai
/homeassistant/components/watttime/ @bachya
/tests/components/watttime/ @bachya
@@ -1744,7 +1726,6 @@ build.json @home-assistant/supervisor
/tests/components/youless/ @gjong
/homeassistant/components/youtube/ @joostlek
/tests/components/youtube/ @joostlek
/homeassistant/components/zabbix/ @kruton
/homeassistant/components/zamg/ @killer0071234
/tests/components/zamg/ @killer0071234
/homeassistant/components/zengge/ @emontnemery

@@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.5.8
RUN pip3 install uv==0.5.4

WORKDIR /usr/src

@@ -1,4 +1,4 @@
FROM mcr.microsoft.com/devcontainers/python:1-3.13
FROM mcr.microsoft.com/devcontainers/python:1-3.12

SHELL ["/bin/bash", "-o", "pipefail", "-c"]

build.yaml (10 changed lines)
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io

@@ -115,7 +115,7 @@ class AuthManagerFlowManager(
*,
context: AuthFlowContext | None = None,
data: dict[str, Any] | None = None,
) -> LoginFlow[Any]:
) -> LoginFlow:
"""Create a login flow."""
auth_provider = self.auth_manager.get_auth_provider(*handler_key)
if not auth_provider:

@@ -4,9 +4,8 @@ from __future__ import annotations

import logging
import types
from typing import Any, Generic
from typing import Any

from typing_extensions import TypeVar
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -35,12 +34,6 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")

_LOGGER = logging.getLogger(__name__)

_MultiFactorAuthModuleT = TypeVar(
"_MultiFactorAuthModuleT",
bound="MultiFactorAuthModule",
default="MultiFactorAuthModule",
)


class MultiFactorAuthModule:
"""Multi-factor Auth Module of validation function."""
@@ -78,7 +71,7 @@ class MultiFactorAuthModule:
"""Return a voluptuous schema to define mfa auth module's input."""
raise NotImplementedError

async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]:
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.

Mfa module should extend SetupFlow
@@ -102,14 +95,11 @@ class MultiFactorAuthModule:
raise NotImplementedError


class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
class SetupFlow(data_entry_flow.FlowHandler):
"""Handler for the setup flow."""

def __init__(
self,
auth_module: _MultiFactorAuthModuleT,
setup_schema: vol.Schema,
user_id: str,
self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
) -> None:
"""Initialize the setup flow."""
self._auth_module = auth_module

@@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule):

return sorted(unordered_services)

async def async_setup_flow(self, user_id: str) -> NotifySetupFlow:
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.

Mfa module should extend SetupFlow
@@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
await self.hass.services.async_call("notify", notify_service, data)


class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
class NotifySetupFlow(SetupFlow):
"""Handler for the setup flow."""

def __init__(
@@ -280,6 +280,8 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
) -> None:
"""Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user_id)
# to fix typing complaint
self._auth_module: NotifyAuthModule = auth_module
self._available_notify_services = available_notify_services
self._secret: str | None = None
self._count: int | None = None

@@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule):
self._users[user_id] = ota_secret # type: ignore[index]
return ota_secret

async def async_setup_flow(self, user_id: str) -> TotpSetupFlow:
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.

Mfa module should extend SetupFlow
@@ -174,9 +174,10 @@ class TotpAuthModule(MultiFactorAuthModule):
return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))


class TotpSetupFlow(SetupFlow[TotpAuthModule]):
class TotpSetupFlow(SetupFlow):
"""Handler for the setup flow."""

_auth_module: TotpAuthModule
_ota_secret: str
_url: str
_image: str

@@ -5,9 +5,8 @@ from __future__ import annotations
from collections.abc import Mapping
import logging
import types
from typing import Any, Generic
from typing import Any

from typing_extensions import TypeVar
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -47,8 +46,6 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)

_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")


class AuthProvider:
"""Provider of user authentication."""
@@ -108,7 +105,7 @@ class AuthProvider:

# Implement by extending class

async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]:
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
"""Return the data flow for logging in with auth provider.

Auth provider should extend LoginFlow and return an instance.
@@ -195,15 +192,12 @@ async def load_auth_provider_module(
return module


class LoginFlow(
FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
Generic[_AuthProviderT],
):
class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]):
"""Handler for the login flow."""

_flow_result = AuthFlowResult

def __init__(self, auth_provider: _AuthProviderT) -> None:
def __init__(self, auth_provider: AuthProvider) -> None:
"""Initialize the login flow."""
self._auth_provider = auth_provider
self._auth_module_id: str | None = None

@@ -6,7 +6,7 @@ import asyncio
from collections.abc import Mapping
import logging
import os
from typing import Any
from typing import Any, cast

import voluptuous as vol

@@ -59,9 +59,7 @@ class CommandLineAuthProvider(AuthProvider):
super().__init__(*args, **kwargs)
self._user_meta: dict[str, dict[str, Any]] = {}

async def async_login_flow(
self, context: AuthFlowContext | None
) -> CommandLineLoginFlow:
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
"""Return a flow to login."""
return CommandLineLoginFlow(self)

@@ -135,7 +133,7 @@ class CommandLineAuthProvider(AuthProvider):
)


class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
class CommandLineLoginFlow(LoginFlow):
"""Handler for the login flow."""

async def async_step_init(
@@ -147,9 +145,9 @@ class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
if user_input is not None:
user_input["username"] = user_input["username"].strip()
try:
await self._auth_provider.async_validate_login(
user_input["username"], user_input["password"]
)
await cast(
CommandLineAuthProvider, self._auth_provider
).async_validate_login(user_input["username"], user_input["password"])
except InvalidAuthError:
errors["base"] = "invalid_auth"


@@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
await data.async_load()
self.data = data

async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow:
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
"""Return a flow to login."""
return HassLoginFlow(self)

@@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider):
pass


class HassLoginFlow(LoginFlow[HassAuthProvider]):
class HassLoginFlow(LoginFlow):
"""Handler for the login flow."""

async def async_step_init(
@@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow[HassAuthProvider]):

if user_input is not None:
try:
await self._auth_provider.async_validate_login(
await cast(HassAuthProvider, self._auth_provider).async_validate_login(
user_input["username"], user_input["password"]
)
except InvalidAuth:

@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Mapping
import hmac
from typing import cast

import voluptuous as vol

@@ -35,9 +36,7 @@ class InvalidAuthError(HomeAssistantError):
class ExampleAuthProvider(AuthProvider):
"""Example auth provider based on hardcoded usernames and passwords."""

async def async_login_flow(
self, context: AuthFlowContext | None
) -> ExampleLoginFlow:
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
"""Return a flow to login."""
return ExampleLoginFlow(self)

@@ -94,7 +93,7 @@ class ExampleAuthProvider(AuthProvider):
return UserMeta(name=name, is_active=True)


class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):
class ExampleLoginFlow(LoginFlow):
"""Handler for the login flow."""

async def async_step_init(
@@ -105,7 +104,7 @@ class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):

if user_input is not None:
try:
self._auth_provider.async_validate_login(
cast(ExampleAuthProvider, self._auth_provider).async_validate_login(
user_input["username"], user_input["password"]
)
except InvalidAuthError:

@@ -104,9 +104,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
"""Trusted Networks auth provider does not support MFA."""
return False

async def async_login_flow(
self, context: AuthFlowContext | None
) -> TrustedNetworksLoginFlow:
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow:
"""Return a flow to login."""
assert context is not None
ip_addr = cast(IPAddress, context.get("ip_address"))
@@ -216,7 +214,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
self.async_validate_access(ip_address(remote_ip))


class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
class TrustedNetworksLoginFlow(LoginFlow):
"""Handler for the login flow."""

def __init__(
@@ -237,7 +235,9 @@ class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
) -> AuthFlowResult:
"""Handle the step of the form."""
try:
self._auth_provider.async_validate_access(self._ip_address)
cast(
TrustedNetworksAuthProvider, self._auth_provider
).async_validate_access(self._ip_address)

except InvalidAuthError:
return self.async_abort(reason="not_allowed")

@@ -1,10 +1,6 @@
|
||||
"""Home Assistant module to handle restoring backups."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import dataclass
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
@@ -18,12 +14,7 @@ import securetar
|
||||
from .const import __version__ as HA_VERSION
|
||||
|
||||
RESTORE_BACKUP_FILE = ".HA_RESTORE"
|
||||
KEEP_BACKUPS = ("backups",)
|
||||
KEEP_DATABASE = (
|
||||
"home-assistant_v2.db",
|
||||
"home-assistant_v2.db-wal",
|
||||
)
|
||||
|
||||
KEEP_PATHS = ("backups",)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -33,21 +24,6 @@ class RestoreBackupFileContent:
|
||||
"""Definition for restore backup file content."""
|
||||
|
||||
backup_file_path: Path
|
||||
password: str | None
|
||||
remove_after_restore: bool
|
||||
restore_database: bool
|
||||
restore_homeassistant: bool
|
||||
|
||||
|
||||
def password_to_key(password: str) -> bytes:
|
||||
"""Generate a AES Key from password.
|
||||
|
||||
Matches the implementation in supervisor.backups.utils.password_to_key.
|
||||
"""
|
||||
key: bytes = password.encode()
|
||||
for _ in range(100):
|
||||
key = hashlib.sha256(key).digest()
|
||||
return key[:16]
|
||||
|
||||
|
||||
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
@@ -56,27 +32,20 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent |
    try:
        instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
        return RestoreBackupFileContent(
            backup_file_path=Path(instruction_content["path"]),
            password=instruction_content["password"],
            remove_after_restore=instruction_content["remove_after_restore"],
            restore_database=instruction_content["restore_database"],
            restore_homeassistant=instruction_content["restore_homeassistant"],
            backup_file_path=Path(instruction_content["path"])
        )
    except (FileNotFoundError, KeyError, json.JSONDecodeError):
    except (FileNotFoundError, json.JSONDecodeError):
        return None
    finally:
        # Always remove the backup instruction file to prevent a boot loop
        instruction_path.unlink(missing_ok=True)
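
A sketch of the .HA_RESTORE payload this parser expects, using exactly the keys read above (all values hypothetical; json and Path are already imported in this module):

instruction = {
    "path": "/config/backups/abc123.tar",
    "password": None,  # or the string the backup was encrypted with
    "remove_after_restore": True,
    "restore_database": True,
    "restore_homeassistant": True,
}
Path("/config/.HA_RESTORE").write_text(json.dumps(instruction), encoding="utf-8")
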
def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
    """Delete all files and directories in the config directory except entries in the keep list."""
    keep_paths = [config_dir.joinpath(path) for path in keep]
    entries_to_remove = sorted(
        entry for entry in config_dir.iterdir() if entry not in keep_paths
def _clear_configuration_directory(config_dir: Path) -> None:
    """Delete all files and directories in the config directory except for the backups directory."""
    keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
    config_contents = sorted(
        [entry for entry in config_dir.iterdir() if entry not in keep_paths]
    )

    for entry in entries_to_remove:
    for entry in config_contents:
        entrypath = config_dir.joinpath(entry)

        if entrypath.is_file():
@@ -85,15 +54,12 @@ def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> Non
            shutil.rmtree(entrypath)


def _extract_backup(
    config_dir: Path,
    restore_content: RestoreBackupFileContent,
) -> None:
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
    """Extract the backup file to the config directory."""
    with (
        TemporaryDirectory() as tempdir,
        securetar.SecureTarFile(
            restore_content.backup_file_path,
            backup_file_path,
            gzip=False,
            mode="r",
        ) as ostf,
@@ -122,41 +88,22 @@ def _extract_backup(
                f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
            ),
            gzip=backup_meta["compressed"],
            key=password_to_key(restore_content.password)
            if restore_content.password is not None
            else None,
            mode="r",
        ) as istf:
            for member in istf.getmembers():
                if member.name == "data":
                    continue
                member.name = member.name.replace("data/", "")
            _clear_configuration_directory(config_dir)
            istf.extractall(
                path=Path(tempdir, "homeassistant"),
                members=securetar.secure_path(istf),
                path=config_dir,
                members=[
                    member
                    for member in securetar.secure_path(istf)
                    if member.name != "data"
                ],
                filter="fully_trusted",
            )
            if restore_content.restore_homeassistant:
                keep = list(KEEP_BACKUPS)
                if not restore_content.restore_database:
                    keep.extend(KEEP_DATABASE)
                _clear_configuration_directory(config_dir, keep)
                shutil.copytree(
                    Path(tempdir, "homeassistant", "data"),
                    config_dir,
                    dirs_exist_ok=True,
                    ignore=shutil.ignore_patterns(*(keep)),
                )
            elif restore_content.restore_database:
                for entry in KEEP_DATABASE:
                    entrypath = config_dir / entry

                    if entrypath.is_file():
                        entrypath.unlink()
                    elif entrypath.is_dir():
                        shutil.rmtree(entrypath)

                for entry in KEEP_DATABASE:
                    shutil.copy(
                        Path(tempdir, "homeassistant", "data", entry),
                        config_dir,
                    )


def restore_backup(config_dir_path: str) -> bool:
@@ -172,13 +119,8 @@ def restore_backup(config_dir_path: str) -> bool:
    backup_file_path = restore_content.backup_file_path
    _LOGGER.info("Restoring %s", backup_file_path)
    try:
        _extract_backup(
            config_dir=config_dir,
            restore_content=restore_content,
        )
        _extract_backup(config_dir, backup_file_path)
    except FileNotFoundError as err:
        raise ValueError(f"Backup file {backup_file_path} does not exist") from err
    if restore_content.remove_after_restore:
        backup_file_path.unlink(missing_ok=True)
    _LOGGER.info("Restore complete, restarting")
    return True
@@ -50,12 +50,6 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
    return False


def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
    # If only cadata is passed, we can ignore it
    kwargs = mapped_args.get("kwargs")
    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)
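
Illustratively, the check passes only for the in-memory cadata form and rejects anything that would touch the filesystem (mapped_args shape assumed from how the wrapper passes keyword arguments through):

_check_load_verify_locations_call_allowed({"kwargs": {"cadata": "PEM data"}})  # True
_check_load_verify_locations_call_allowed({"kwargs": {"cafile": "/etc/ssl/ca.pem"}})  # False
_check_load_verify_locations_call_allowed({"kwargs": {}})  # False
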
@dataclass(slots=True, frozen=True)
class BlockingCall:
    """Class to hold information about a blocking call."""
@@ -164,7 +158,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
        original_func=SSLContext.load_verify_locations,
        object=SSLContext,
        function="load_verify_locations",
        check_allowed=_check_load_verify_locations_call_allowed,
        check_allowed=None,
        strict=False,
        strict_core=False,
        skip_for_tests=True,

@@ -252,7 +252,6 @@ PRELOAD_STORAGE = [
    "assist_pipeline.pipelines",
    "core.analytics",
    "auth_module.totp",
    "backup",
]
@@ -2,7 +2,6 @@
  "domain": "microsoft",
  "name": "Microsoft",
  "integrations": [
    "azure_data_explorer",
    "azure_devops",
    "azure_event_hub",
    "azure_service_bus",

@@ -1,5 +0,0 @@
{
  "domain": "slide",
  "name": "Slide",
  "integrations": ["slide", "slide_local"]
}
@@ -9,16 +9,18 @@ from jaraco.abode.devices.light import Light

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_COLOR_TEMP,
    ATTR_HS_COLOR,
    DEFAULT_MAX_KELVIN,
    DEFAULT_MIN_KELVIN,
    ColorMode,
    LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.color import (
    color_temperature_kelvin_to_mired,
    color_temperature_mired_to_kelvin,
)

from . import AbodeSystem
from .const import DOMAIN
@@ -42,13 +44,13 @@ class AbodeLight(AbodeDevice, LightEntity):

    _device: Light
    _attr_name = None
    _attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
    _attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN

    def turn_on(self, **kwargs: Any) -> None:
        """Turn on the light."""
        if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable:
            self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])
        if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable:
            self._device.set_color_temp(
                int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]))
            )
            return

        if ATTR_HS_COLOR in kwargs and self._device.is_color_capable:
@@ -83,10 +85,10 @@ class AbodeLight(AbodeDevice, LightEntity):
        return None

    @property
    def color_temp_kelvin(self) -> int | None:
    def color_temp(self) -> int | None:
        """Return the color temp of the light."""
        if self._device.has_color:
            return int(self._device.color_temp)
            return color_temperature_kelvin_to_mired(self._device.color_temp)
        return None

    @property
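
The two color-temperature scales swapped in this hunk are reciprocal (mired = 1,000,000 / kelvin), so a rough sketch of the conversion pair is:

def kelvin_to_mired(kelvin: float) -> int:
    return round(1_000_000 / kelvin)  # e.g. 6500 K -> 154 mireds (cool white)

def mired_to_kelvin(mired: float) -> int:
    return round(1_000_000 / mired)  # e.g. 370 mireds -> ~2703 K (warm white)
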
@@ -16,9 +16,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(kw_only=True, frozen=True)
class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
@@ -42,7 +42,7 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
        errors: dict[str, str] = {}

        if user_input is not None:
            mac = user_input[CONF_ADDRESS]
            mac = format_mac(user_input[CONF_ADDRESS])
            try:
                is_new_style_scale = await is_new_scale(mac)
            except AcaiaDeviceNotFound:
@@ -53,12 +53,12 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
            except AcaiaUnknownDevice:
                return self.async_abort(reason="unsupported_device")
            else:
                await self.async_set_unique_id(format_mac(mac))
                await self.async_set_unique_id(mac)
                self._abort_if_unique_id_configured()

            if not errors:
                return self.async_create_entry(
                    title=self._discovered_devices[mac],
                    title=self._discovered_devices[user_input[CONF_ADDRESS]],
                    data={
                        CONF_ADDRESS: mac,
                        CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
@@ -99,10 +99,10 @@ class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
    ) -> ConfigFlowResult:
        """Handle a discovered Bluetooth device."""

        self._discovered[CONF_ADDRESS] = discovery_info.address
        self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
        self._discovered[CONF_NAME] = discovery_info.name

        await self.async_set_unique_id(format_mac(discovery_info.address))
        await self.async_set_unique_id(mac)
        self._abort_if_unique_id_configured()

        try:
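
format_mac only normalizes the address, which is why applying it consistently to unique_id, dict keys, and discovery data matters; a quick illustration:

from homeassistant.helpers.device_registry import format_mac

format_mac("AA:BB:CC:DD:EE:FF")  # "aa:bb:cc:dd:ee:ff"
format_mac("AA-BB-CC-DD-EE-FF")  # "aa:bb:cc:dd:ee:ff"
format_mac("AABB.CCDD.EEFF")     # "aa:bb:cc:dd:ee:ff"
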
@@ -2,11 +2,7 @@

from dataclasses import dataclass

from homeassistant.helpers.device_registry import (
    CONNECTION_BLUETOOTH,
    DeviceInfo,
    format_mac,
)
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -29,15 +25,13 @@ class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._scale = coordinator.scale
        formatted_mac = format_mac(self._scale.mac)
        self._attr_unique_id = f"{formatted_mac}_{entity_description.key}"
        self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"

        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, formatted_mac)},
            identifiers={(DOMAIN, self._scale.mac)},
            manufacturer="Acaia",
            model=self._scale.model,
            suggested_area="Kitchen",
            connections={(CONNECTION_BLUETOOTH, self._scale.mac)},
        )

    @property

@@ -25,6 +25,5 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["aioacaia"],
  "quality_scale": "platinum",
  "requirements": ["aioacaia==0.1.11"]
  "requirements": ["aioacaia==0.1.9"]
}
@@ -1,106 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      No custom actions are defined.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      No custom actions are defined.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      No explicit event subscriptions.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup:
    status: exempt
    comment: |
      Device is expected to be offline most of the time, but needs to connect quickly once available.
  unique-config-entry: done
  # Silver
  action-exceptions:
    status: exempt
    comment: |
      No custom actions are defined.
  config-entry-unloading: done
  docs-configuration-parameters: done
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: |
      Handled by coordinator.
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: |
      No authentication required.
  test-coverage: done
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: |
      No IP discovery.
  discovery:
    status: done
    comment: |
      Bluetooth discovery.
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      Device type integration.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default:
    status: exempt
    comment: |
      No noisy/non-essential entities.
  entity-translations: done
  exception-translations:
    status: exempt
    comment: |
      No custom exceptions.
  icon-translations: done
  reconfiguration-flow:
    status: exempt
    comment: |
      Only parameter that could be changed (MAC = unique_id) would force a new config entry.
  repair-issues:
    status: exempt
    comment: |
      No repairs/issues.
  stale-devices:
    status: exempt
    comment: |
      Device type integration.

  # Platinum
  async-dependency: done
  inject-websession:
    status: exempt
    comment: |
      Bluetooth connection.
  strict-typing: done

@@ -21,9 +21,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0


@dataclass(kw_only=True, frozen=True)
class AcaiaSensorEntityDescription(SensorEntityDescription):
@@ -75,6 +75,7 @@ class AdaxDevice(ClimateEntity):
    )
    _attr_target_temperature_step = PRECISION_WHOLE
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
        """Initialize the heater."""

@@ -37,7 +37,7 @@ STATE_KEY_POSITION = "position"

PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ADS_VAR): cv.string,
        vol.Optional(CONF_ADS_VAR): cv.string,
        vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
        vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
        vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,

@@ -102,6 +102,7 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
    _attr_max_temp = 32
    _attr_min_temp = 16
    _attr_name = None
    _enable_turn_on_off_backwards_compatibility = False
    _support_preset = ClimateEntityFeature(0)

    def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
@@ -260,6 +261,7 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
    _attr_target_temperature_step = PRECISION_WHOLE
    _attr_max_temp = 32
    _attr_min_temp = 16
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
        """Initialize an AdvantageAir Zone control."""
@@ -1,7 +1,6 @@
"""The AEMET OpenData component."""

import logging
import shutil

from aemet_opendata.exceptions import AemetError, TownNotFound
from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
@@ -11,9 +10,8 @@ from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.storage import STORAGE_DIR

from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DOMAIN, PLATFORMS
from .const import CONF_STATION_UPDATES, PLATFORMS
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
@@ -26,15 +24,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
    latitude = entry.data[CONF_LATITUDE]
    longitude = entry.data[CONF_LONGITUDE]
    update_features: int = UpdateFeature.FORECAST
    if entry.options.get(CONF_RADAR_UPDATES, False):
        update_features |= UpdateFeature.RADAR
    if entry.options.get(CONF_STATION_UPDATES, True):
        update_features |= UpdateFeature.STATION

    options = ConnectionOptions(api_key, update_features)
    aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
    aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"))

    try:
        await aemet.select_coordinates(latitude, longitude)
    except TownNotFound as err:
@@ -63,11 +57,3 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Remove a config entry."""
    await hass.async_add_executor_job(
        shutil.rmtree,
        hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"),
    )

@@ -17,11 +17,10 @@ from homeassistant.helpers.schema_config_entry_flow import (
    SchemaOptionsFlowHandler,
)

from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN
from .const import CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN

OPTIONS_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_RADAR_UPDATES, default=False): bool,
        vol.Required(CONF_STATION_UPDATES, default=True): bool,
    }
)

@@ -51,9 +51,8 @@ from homeassistant.components.weather import (
from homeassistant.const import Platform

ATTRIBUTION = "Powered by AEMET OpenData"
CONF_RADAR_UPDATES = "radar_updates"
CONF_STATION_UPDATES = "station_updates"
PLATFORMS = [Platform.IMAGE, Platform.SENSOR, Platform.WEATHER]
PLATFORMS = [Platform.SENSOR, Platform.WEATHER]
DEFAULT_NAME = "AEMET"
DOMAIN = "aemet"
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import Any

from aemet_opendata.const import AOD_COORDS, AOD_IMG_BYTES
from aemet_opendata.const import AOD_COORDS

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import (
@@ -26,7 +26,6 @@ TO_REDACT_CONFIG = [

TO_REDACT_COORD = [
    AOD_COORDS,
    AOD_IMG_BYTES,
]
@@ -1,86 +0,0 @@
"""Support for the AEMET OpenData images."""

from __future__ import annotations

from typing import Final

from aemet_opendata.const import AOD_DATETIME, AOD_IMG_BYTES, AOD_IMG_TYPE, AOD_RADAR
from aemet_opendata.helpers import dict_nested_value

from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity

AEMET_IMAGES: Final[tuple[ImageEntityDescription, ...]] = (
    ImageEntityDescription(
        key=AOD_RADAR,
        translation_key="weather_radar",
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: AemetConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up AEMET OpenData image entities based on a config entry."""
    domain_data = config_entry.runtime_data
    name = domain_data.name
    coordinator = domain_data.coordinator

    unique_id = config_entry.unique_id
    assert unique_id is not None

    async_add_entities(
        AemetImage(
            hass,
            name,
            coordinator,
            description,
            unique_id,
        )
        for description in AEMET_IMAGES
        if dict_nested_value(coordinator.data["lib"], [description.key]) is not None
    )


class AemetImage(AemetEntity, ImageEntity):
    """Implementation of an AEMET OpenData image."""

    entity_description: ImageEntityDescription

    def __init__(
        self,
        hass: HomeAssistant,
        name: str,
        coordinator: WeatherUpdateCoordinator,
        description: ImageEntityDescription,
        unique_id: str,
    ) -> None:
        """Initialize the image."""
        super().__init__(coordinator, name, unique_id)
        ImageEntity.__init__(self, hass)
        self.entity_description = description
        self._attr_unique_id = f"{unique_id}-{description.key}"

        self._async_update_attrs()

    @callback
    def _handle_coordinator_update(self) -> None:
        """Update attributes when the coordinator updates."""
        self._async_update_attrs()
        super()._handle_coordinator_update()

    @callback
    def _async_update_attrs(self) -> None:
        """Update image attributes."""
        image_data = self.get_aemet_value([self.entity_description.key])
        self._cached_image = Image(
            content_type=image_data.get(AOD_IMG_TYPE),
            content=image_data.get(AOD_IMG_BYTES),
        )
        self._attr_image_last_updated = image_data.get(AOD_DATETIME)

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aemet",
  "iot_class": "cloud_polling",
  "loggers": ["aemet_opendata"],
  "requirements": ["AEMET-OpenData==0.6.4"]
  "requirements": ["AEMET-OpenData==0.6.3"]
}

@@ -18,18 +18,10 @@
        }
      }
    },
    "entity": {
      "image": {
        "weather_radar": {
          "name": "Weather radar"
        }
      }
    },
    "options": {
      "step": {
        "init": {
          "data": {
            "radar_updates": "Gather data from AEMET weather radar",
            "station_updates": "Gather data from AEMET weather stations"
          }
        }
@@ -31,9 +31,7 @@ rules:
  # Silver
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: No options to configure
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable: done
  integration-owner: done
@@ -43,16 +41,12 @@ rules:
    status: exempt
    comment: |
      This integration does not require authentication.
  test-coverage: todo
  test-coverage: done
  # Gold
  devices: done
  diagnostics: done
  discovery-update-info:
    status: todo
    comment: DHCP is still possible
  discovery:
    status: todo
    comment: DHCP is still possible
  discovery-update-info: done
  discovery: done
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
@@ -95,6 +95,7 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
        | ClimateEntityFeature.TURN_ON
    )
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, coordinator, ac_number, info):
        """Initialize the climate device."""
@@ -204,6 +205,7 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
    )
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = AT_GROUP_MODES
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, coordinator, group_number, info):
        """Initialize the climate device."""

@@ -124,6 +124,7 @@ class Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity):
    _attr_translation_key = DOMAIN
    _attr_target_temperature_step = 1
    _attr_name = None
    _enable_turn_on_off_backwards_compatibility = False


class Airtouch5AC(Airtouch5ClimateEntity):

@@ -136,6 +136,7 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
    _attr_name = None
    _speeds: dict[int, str] = {}
    _speeds_reverse: dict[str, int] = {}
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(
        self,
@@ -177,6 +177,7 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):

    _attr_name = None
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _enable_turn_on_off_backwards_compatibility = False

    def _init_attributes(self) -> None:
        """Init common climate device attributes."""
@@ -193,6 +194,12 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
                ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
            )

        if (
            self.get_airzone_value(AZD_SPEED) is not None
            and self.get_airzone_value(AZD_SPEEDS) is not None
        ):
            self._initialize_fan_speeds()

    @callback
    def _handle_coordinator_update(self) -> None:
        """Update attributes when the coordinator updates."""
@@ -207,6 +214,8 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
        self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[
            self.get_airzone_value(AZD_ACTION)
        ]
        if self.supported_features & ClimateEntityFeature.FAN_MODE:
            self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
        if self.get_airzone_value(AZD_POWER):
            self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[
                self.get_airzone_value(AZD_MODE)
@@ -243,22 +252,6 @@ class AirzoneDeviceClimate(AirzoneClimate):
    _speeds: dict[int, str]
    _speeds_reverse: dict[str, int]

    def _init_attributes(self) -> None:
        """Init common climate device attributes."""
        super()._init_attributes()
        if (
            self.get_airzone_value(AZD_SPEED) is not None
            and self.get_airzone_value(AZD_SPEEDS) is not None
        ):
            self._initialize_fan_speeds()

    @callback
    def _async_update_attrs(self) -> None:
        """Update climate attributes."""
        super()._async_update_attrs()
        if self.supported_features & ClimateEntityFeature.FAN_MODE:
            self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))

    def _initialize_fan_speeds(self) -> None:
        """Initialize fan speeds."""
        azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS)
@@ -4,6 +4,7 @@ from __future__ import annotations

import asyncio
from datetime import timedelta
from functools import partial
import logging
from typing import TYPE_CHECKING, Any, Final, final

@@ -26,14 +27,26 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.deprecation import (
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity_platform import EntityPlatform
from homeassistant.helpers.frame import ReportBehavior, report_usage
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey

from .const import (
from .const import (  # noqa: F401
    _DEPRECATED_FORMAT_NUMBER,
    _DEPRECATED_FORMAT_TEXT,
    _DEPRECATED_SUPPORT_ALARM_ARM_AWAY,
    _DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
    _DEPRECATED_SUPPORT_ALARM_ARM_HOME,
    _DEPRECATED_SUPPORT_ALARM_ARM_NIGHT,
    _DEPRECATED_SUPPORT_ALARM_ARM_VACATION,
    _DEPRECATED_SUPPORT_ALARM_TRIGGER,
    ATTR_CHANGED_BY,
    ATTR_CODE_ARM_REQUIRED,
    DOMAIN,
@@ -150,6 +163,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
    _alarm_control_panel_option_default_code: str | None = None

    __alarm_legacy_state: bool = False
    __alarm_legacy_state_reported: bool = False

    def __init_subclass__(cls, **kwargs: Any) -> None:
        """Post initialisation processing."""
@@ -166,7 +180,9 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
        unless already reported.
        """
        if name == "_attr_state":
            self._report_deprecated_alarm_state_handling()
            if self.__alarm_legacy_state_reported is not True:
                self._report_deprecated_alarm_state_handling()
            self.__alarm_legacy_state_reported = True
        return super().__setattr__(name, value)

    @callback
@@ -178,7 +194,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
    ) -> None:
        """Start adding an entity to a platform."""
        super().add_to_platform_start(hass, platform, parallel_updates)
        if self.__alarm_legacy_state:
        if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported:
            self._report_deprecated_alarm_state_handling()

    @callback
@@ -187,16 +203,19 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A

        Integrations should implement alarm_state instead of using state directly.
        """
        report_usage(
            "is setting state directly."
            f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
            " property and return its state using the AlarmControlPanelState enum",
            core_integration_behavior=ReportBehavior.ERROR,
            custom_integration_behavior=ReportBehavior.LOG,
            breaks_in_ha_version="2025.11",
            integration_domain=self.platform.platform_name if self.platform else None,
            exclude_integrations={DOMAIN},
        )
        self.__alarm_legacy_state_reported = True
        if "custom_components" in type(self).__module__:
            # Do not report on core integrations as they have been fixed.
            report_issue = "report it to the custom integration author."
            _LOGGER.warning(
                "Entity %s (%s) is setting state directly"
                " which will stop working in HA Core 2025.11."
                " Entities should implement the 'alarm_state' property and"
                " return its state using the AlarmControlPanelState enum, please %s",
                self.entity_id,
                type(self),
                report_issue,
            )

    @final
    @property
@@ -355,7 +374,12 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
    @cached_property
    def supported_features(self) -> AlarmControlPanelEntityFeature:
        """Return the list of supported features."""
        return self._attr_supported_features
        features = self._attr_supported_features
        if type(features) is int:  # noqa: E721
            new_features = AlarmControlPanelEntityFeature(features)
            self._report_deprecated_supported_features_values(new_features)
            return new_features
        return features

    @final
    @property
@@ -393,3 +417,13 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
            self._alarm_control_panel_option_default_code = default_code
            return
        self._alarm_control_panel_option_default_code = None


# As we import constants of the const module here, we need to add the following
# functions to check for deprecated constants again
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
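
What the int-to-enum shim in supported_features does, in isolation (flag values per const.py below; passing a plain int still works but now triggers the deprecation report):

from homeassistant.components.alarm_control_panel.const import (
    AlarmControlPanelEntityFeature,
)

legacy = 1 | 2  # old SUPPORT_ALARM_ARM_HOME | SUPPORT_ALARM_ARM_AWAY ints
features = AlarmControlPanelEntityFeature(legacy)  # lossless IntFlag conversion
assert AlarmControlPanelEntityFeature.ARM_HOME in features
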
@@ -1,8 +1,16 @@
"""Provides the constants needed for component."""

from enum import IntFlag, StrEnum
from functools import partial
from typing import Final

from homeassistant.helpers.deprecation import (
    DeprecatedConstantEnum,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

DOMAIN: Final = "alarm_control_panel"

ATTR_CHANGED_BY: Final = "changed_by"
@@ -31,6 +39,12 @@ class CodeFormat(StrEnum):
    NUMBER = "number"


# These constants are deprecated as of Home Assistant 2022.5, can be removed in 2025.1
# Please use the CodeFormat enum instead.
_DEPRECATED_FORMAT_TEXT: Final = DeprecatedConstantEnum(CodeFormat.TEXT, "2025.1")
_DEPRECATED_FORMAT_NUMBER: Final = DeprecatedConstantEnum(CodeFormat.NUMBER, "2025.1")


class AlarmControlPanelEntityFeature(IntFlag):
    """Supported features of the alarm control panel entity."""

@@ -42,6 +56,27 @@ class AlarmControlPanelEntityFeature(IntFlag):
    ARM_VACATION = 32


# These constants are deprecated as of Home Assistant 2022.5
# Please use the AlarmControlPanelEntityFeature enum instead.
_DEPRECATED_SUPPORT_ALARM_ARM_HOME: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.ARM_HOME, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_AWAY: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.ARM_AWAY, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_NIGHT: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.ARM_NIGHT, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_TRIGGER: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.TRIGGER, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_VACATION: Final = DeprecatedConstantEnum(
    AlarmControlPanelEntityFeature.ARM_VACATION, "2025.1"
)

CONDITION_TRIGGERED: Final = "is_triggered"
CONDITION_DISARMED: Final = "is_disarmed"
CONDITION_ARMED_HOME: Final = "is_armed_home"
@@ -49,3 +84,10 @@ CONDITION_ARMED_AWAY: Final = "is_armed_away"
CONDITION_ARMED_NIGHT: Final = "is_armed_night"
CONDITION_ARMED_VACATION: Final = "is_armed_vacation"
CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass"

# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
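
A hedged sketch of what the module-level __getattr__ shim gives consumers: the removed constant name still resolves, but through check_if_deprecated_constant, which is expected to log a deprecation warning and hand back the enum member:

from homeassistant.components.alarm_control_panel import const

code_format = const.FORMAT_TEXT  # resolved via _DEPRECATED_FORMAT_TEXT, warns once
assert code_format == const.CodeFormat.TEXT  # assumed equivalence
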
@@ -317,7 +317,6 @@ class Alexa(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -404,7 +403,6 @@ class AlexaPowerController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -438,7 +436,7 @@ class AlexaPowerController(AlexaCapability):
        elif self.entity.domain == remote.DOMAIN:
            is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN)
        elif self.entity.domain == vacuum.DOMAIN:
            is_on = self.entity.state == vacuum.VacuumActivity.CLEANING
            is_on = self.entity.state == vacuum.STATE_CLEANING
        elif self.entity.domain == timer.DOMAIN:
            is_on = self.entity.state != STATE_IDLE
        elif self.entity.domain == water_heater.DOMAIN:
@@ -471,7 +469,6 @@ class AlexaLockController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -526,7 +523,6 @@ class AlexaSceneController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -566,7 +562,6 @@ class AlexaBrightnessController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -616,7 +611,6 @@ class AlexaColorController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -675,7 +669,6 @@ class AlexaColorTemperatureController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -722,7 +715,6 @@ class AlexaSpeaker(AlexaCapability):
        "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
        "it-IT",
        "ja-JP",
        "nl-NL",
    }

    def name(self) -> str:
@@ -780,7 +772,6 @@ class AlexaStepSpeaker(AlexaCapability):
        "es-ES",
        "fr-FR",  # Not documented as of 2021-12-04, see PR #60489
        "it-IT",
        "nl-NL",
    }

    def name(self) -> str:
@@ -810,7 +801,6 @@ class AlexaPlaybackController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -869,7 +859,6 @@ class AlexaInputController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -1115,7 +1104,6 @@ class AlexaThermostatController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -1257,7 +1245,6 @@ class AlexaPowerLevelController(AlexaCapability):
        "fr-CA",
        "fr-FR",
        "it-IT",
        "nl-NL",
        "ja-JP",
    }

@@ -1736,7 +1723,6 @@ class AlexaRangeController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -2080,7 +2066,6 @@ class AlexaToggleController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -2227,7 +2212,6 @@ class AlexaPlaybackStateReporter(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -2283,7 +2267,6 @@ class AlexaSeekController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -2377,7 +2360,6 @@ class AlexaEqualizerController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }

@@ -2488,7 +2470,6 @@ class AlexaCameraStreamController(AlexaCapability):
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }


@@ -59,7 +59,6 @@ CONF_SUPPORTED_LOCALES = (
    "hi-IN",
    "it-IT",
    "ja-JP",
    "nl-NL",
    "pt-BR",
)
@@ -359,7 +359,7 @@ async def async_api_set_color_temperature(
    await hass.services.async_call(
        entity.domain,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: kelvin},
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_KELVIN: kelvin},
        blocking=False,
        context=context,
    )
@@ -376,14 +376,14 @@ async def async_api_decrease_color_temp(
) -> AlexaResponse:
    """Process a decrease color temperature request."""
    entity = directive.entity
    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
    min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN])
    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
    max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS])

    value = max(min_kelvin, current - 500)
    value = min(max_mireds, current + 50)
    await hass.services.async_call(
        entity.domain,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
        blocking=False,
        context=context,
    )
@@ -400,14 +400,14 @@ async def async_api_increase_color_temp(
) -> AlexaResponse:
    """Process an increase color temperature request."""
    entity = directive.entity
    current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
    max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN])
    current = int(entity.attributes[light.ATTR_COLOR_TEMP])
    min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS])

    value = min(max_kelvin, current + 500)
    value = max(min_mireds, current - 50)
    await hass.services.async_call(
        entity.domain,
        SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
        {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value},
        blocking=False,
        context=context,
    )
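
A worked example of the step-size change above, with hypothetical attribute values. In the Kelvin variant a light at 3000 K with a 2000 K minimum steps down by 500 K; in the mired variant the same light (333 mireds, 500 mired maximum) steps up by 50 mireds, since warmer means a larger mired value:

current_kelvin, min_kelvin = 3000, 2000
max(min_kelvin, current_kelvin - 500)  # 2500 K (warmer)

current_mired, max_mired = 333, 500
min(max_mired, current_mired + 50)  # 383 mireds, roughly 2611 K
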
@@ -527,7 +527,6 @@ async def async_api_unlock(
        "hi-IN",
        "it-IT",
        "ja-JP",
        "nl-NL",
        "pt-BR",
    }:
        msg = (

@@ -41,7 +41,7 @@
      }
    },
    "enable_motion_recording": {
      "name": "Enable motion recording",
      "name": "Enables motion recording",
      "description": "Enables recording a clip to camera storage when motion is detected.",
      "fields": {
        "entity_id": {
@@ -51,8 +51,8 @@
      }
    },
    "disable_motion_recording": {
      "name": "Disable motion recording",
      "description": "Disables recording a clip to camera storage when motion is detected.",
      "name": "Disables motion recording",
      "description": "Disable recording a clip to camera storage when motion is detected.",
      "fields": {
        "entity_id": {
          "name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]",
@@ -11,7 +11,12 @@ from python_homeassistant_analytics import (
from python_homeassistant_analytics.models import IntegrationType
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
@@ -20,7 +25,6 @@ from homeassistant.helpers.selector import (
    SelectSelectorConfig,
)

from . import AnalyticsInsightsConfigEntry
from .const import (
    CONF_TRACKED_ADDONS,
    CONF_TRACKED_CUSTOM_INTEGRATIONS,
@@ -42,7 +46,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: AnalyticsInsightsConfigEntry,
        config_entry: ConfigEntry,
    ) -> HomeassistantAnalyticsOptionsFlowHandler:
        """Get the options flow for this handler."""
        return HomeassistantAnalyticsOptionsFlowHandler()

@@ -7,6 +7,6 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["python_homeassistant_analytics"],
  "requirements": ["python-homeassistant-analytics==0.8.1"],
  "requirements": ["python-homeassistant-analytics==0.8.0"],
  "single_config_entry": true
}
@@ -1,100 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: todo
  docs-installation-instructions: todo
  docs-removal-instructions: todo
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not provide actions.
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable:
    status: done
    comment: |
      The coordinator handles this.
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: |
      The coordinator handles this.
  parallel-updates: todo
  reauthentication-flow:
    status: exempt
    comment: |
      This integration does not require authentication.
  test-coverage: todo
  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and thus does not support discovery.
  discovery:
    status: exempt
    comment: |
      This integration is a cloud service and thus does not support discovery.
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: |
      This integration has a fixed single service.
  entity-category: done
  entity-device-class:
    status: exempt
    comment: |
      This integration does not have entities with device classes.
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow:
    status: exempt
    comment: All the options of this integration are managed via the options flow
  repair-issues:
    status: exempt
    comment: |
      This integration doesn't have any cases where raising an issue is needed.
  stale-devices:
    status: exempt
    comment: |
      This integration has a fixed single service.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done
@@ -110,7 +110,7 @@ def _setup_androidtv(
        adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"

    else:
        # Communicate via ADB server
        # Use "pure-python-adb" (communicate with ADB server)
        signer = None
        adb_log = (
            "using ADB server at"
@@ -135,16 +135,15 @@ async def async_connect_androidtv(
    )

    aftv = await async_androidtv_setup(
        host=config[CONF_HOST],
        port=config[CONF_PORT],
        adbkey=adbkey,
        adb_server_ip=config.get(CONF_ADB_SERVER_IP),
        adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
        state_detection_rules=state_detection_rules,
        device_class=config[CONF_DEVICE_CLASS],
        auth_timeout_s=timeout,
        signer=signer,
        log_errors=False,
        config[CONF_HOST],
        config[CONF_PORT],
        adbkey,
        config.get(CONF_ADB_SERVER_IP),
        config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
        state_detection_rules,
        config[CONF_DEVICE_CLASS],
        timeout,
        signer,
    )

    if not aftv.available:

@@ -151,5 +151,5 @@ class AndroidTVEntity(Entity):
            # Using "adb_shell" (Python ADB implementation)
            self.exceptions = ADB_PYTHON_EXCEPTIONS
        else:
            # Communicate via ADB server
            # Using "pure-python-adb" (communicate with ADB server)
            self.exceptions = ADB_TCP_EXCEPTIONS

@@ -6,6 +6,10 @@
  "documentation": "https://www.home-assistant.io/integrations/androidtv",
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["adb_shell", "androidtv"],
  "requirements": ["adb-shell[async]==0.4.4", "androidtv[async]==0.0.75"]
  "loggers": ["adb_shell", "androidtv", "pure_python_adb"],
  "requirements": [
    "adb-shell[async]==0.4.4",
    "androidtv[async]==0.0.73",
    "pure-python-adb[async]==0.3.0.dev0"
  ]
}
@@ -156,12 +156,7 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
        # and one of them, which could end up being in discovery_info.host, is from a
        # different device. If any of the discovery_info.ip_addresses matches the
        # existing host, don't update the host.
        if (
            existing_config_entry
            # Ignored entries don't have host
            and CONF_HOST in existing_config_entry.data
            and len(discovery_info.ip_addresses) > 1
        ):
        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
            existing_host = existing_config_entry.data[CONF_HOST]
            if existing_host != self.host:
                if existing_host in [

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/aosmith",
  "iot_class": "cloud_polling",
  "requirements": ["py-aosmith==1.0.12"]
  "requirements": ["py-aosmith==1.0.11"]
}
@@ -7,7 +7,7 @@
  "documentation": "https://www.home-assistant.io/integrations/apple_tv",
  "iot_class": "local_push",
  "loggers": ["pyatv", "srptools"],
  "requirements": ["pyatv==0.16.0"],
  "requirements": ["pyatv==0.15.1"],
  "zeroconf": [
    "_mediaremotetv._tcp.local.",
    "_companion-link._tcp.local.",

@@ -6,5 +6,5 @@
  "iot_class": "cloud_push",
  "loggers": ["apprise"],
  "quality_scale": "legacy",
  "requirements": ["apprise==1.9.1"]
  "requirements": ["apprise==1.9.0"]
}
@@ -5,17 +5,12 @@ from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta

from APsystemsEZ1 import (
    APsystemsEZ1M,
    InverterReturnedError,
    ReturnAlarmInfo,
    ReturnOutputData,
)
from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER
from .const import LOGGER


@dataclass
@@ -48,11 +43,6 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
        self.api.min_power = device_info.minPower

    async def _async_update_data(self) -> ApSystemsSensorData:
        try:
            output_data = await self.api.get_output_data()
            alarm_info = await self.api.get_alarm_info()
        except InverterReturnedError:
            raise UpdateFailed(
                translation_domain=DOMAIN, translation_key="inverter_error"
            ) from None
        output_data = await self.api.get_output_data()
        alarm_info = await self.api.get_alarm_info()
        return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info)

@@ -2,8 +2,6 @@

from __future__ import annotations

from aiohttp import ClientConnectorError

from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
from homeassistant.const import UnitOfPower
from homeassistant.core import HomeAssistant
@@ -22,7 +20,7 @@ async def async_setup_entry(
) -> None:
    """Set up the sensor platform."""

    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)], True)
    add_entities([ApSystemsMaxOutputNumber(config_entry.runtime_data)])


class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):
@@ -47,13 +45,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):

    async def async_update(self) -> None:
        """Set the state with the value fetched from the inverter."""
        try:
            status = await self._api.get_max_power()
        except (TimeoutError, ClientConnectorError):
            self._attr_available = False
        else:
            self._attr_available = True
            self._attr_native_value = status
        self._attr_native_value = await self._api.get_max_power()

    async def async_set_native_value(self, value: float) -> None:
        """Set the desired output power."""

@@ -72,10 +72,5 @@
        "name": "Inverter status"
      }
    }
  },
  "exceptions": {
    "inverter_error": {
      "message": "Inverter returned an error"
    }
  }
}
@@ -16,7 +16,6 @@ import time
from typing import Any, Literal, cast
import wave

import hass_nabucasa
import voluptuous as vol

from homeassistant.components import (
@@ -30,7 +29,6 @@ from homeassistant.components import (
from homeassistant.components.tts import (
    generate_media_source_id as tts_generate_media_source_id,
)
from homeassistant.const import MATCH_ALL
from homeassistant.core import Context, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent
@@ -919,11 +917,6 @@ class PipelineRun:
            )
        except (asyncio.CancelledError, TimeoutError):
            raise  # expected
        except hass_nabucasa.auth.Unauthenticated as src_error:
            raise SpeechToTextError(
                code="cloud-auth-failed",
                message="Home Assistant Cloud authentication failed",
            ) from src_error
        except Exception as src_error:
            _LOGGER.exception("Unexpected error during speech-to-text")
            raise SpeechToTextError(
@@ -1016,23 +1009,15 @@ class PipelineRun:
        if self.intent_agent is None:
            raise RuntimeError("Recognize intent was not prepared")

        if self.pipeline.conversation_language == MATCH_ALL:
            # LLMs support all languages ('*') so use pipeline language for
            # intent fallback.
            input_language = self.pipeline.language
        else:
            input_language = self.pipeline.conversation_language

        self.process_event(
            PipelineEvent(
                PipelineEventType.INTENT_START,
                {
                    "engine": self.intent_agent,
                    "language": input_language,
                    "language": self.pipeline.conversation_language,
                    "intent_input": intent_input,
                    "conversation_id": conversation_id,
                    "device_id": device_id,
                    "prefer_local_intents": self.pipeline.prefer_local_intents,
                },
            )
        )
@@ -1043,10 +1028,9 @@ class PipelineRun:
            context=self.context,
            conversation_id=conversation_id,
            device_id=device_id,
            language=input_language,
            language=self.pipeline.language,
            agent_id=self.intent_agent,
        )
        processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT

        conversation_result: conversation.ConversationResult | None = None
        if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT:
@@ -1056,7 +1040,7 @@ class PipelineRun:
                := await conversation.async_handle_sentence_triggers(
                    self.hass, user_input
                )
            ) is not None:
            ):
                # Sentence trigger matched
                trigger_response = intent.IntentResponse(
                    self.pipeline.conversation_language
@@ -1077,7 +1061,6 @@ class PipelineRun:
                    response=intent_response,
                    conversation_id=user_input.conversation_id,
                )
                processed_locally = True

        if conversation_result is None:
            # Fall back to pipeline conversation agent
@@ -1102,10 +1085,7 @@ class PipelineRun:
        self.process_event(
            PipelineEvent(
                PipelineEventType.INTENT_END,
                {
                    "processed_locally": processed_locally,
                    "intent_output": conversation_result.as_dict(),
                },
                {"intent_output": conversation_result.as_dict()},
            )
        )

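One side of the intent-recognition hunks derives an `input_language` before firing `INTENT_START`: agents that advertise all languages get the pipeline language instead of the conversation language. A minimal sketch of that rule, assuming `MATCH_ALL` is the `"*"` sentinel from `homeassistant.const`:

```python
# Language-fallback rule sketch; MATCH_ALL mirrors homeassistant.const.MATCH_ALL.
MATCH_ALL = "*"


def pick_input_language(conversation_language: str, pipeline_language: str) -> str:
    """LLM agents support all languages ('*'); fall back to the pipeline language then."""
    if conversation_language == MATCH_ALL:
        return pipeline_language
    return conversation_language


assert pick_input_language("*", "en") == "en"
assert pick_input_language("nl", "en") == "nl"
```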
@@ -140,7 +140,7 @@ class VoiceCommandSegmenter:

        self._timeout_seconds_left -= chunk_seconds
        if self._timeout_seconds_left <= 0:
            _LOGGER.debug(
            _LOGGER.warning(
                "VAD end of speech detection timed out after %s seconds",
                self.timeout_seconds,
            )

@@ -46,6 +46,7 @@ class AtagThermostat(AtagEntity, ClimateEntity):
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE
    )
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, coordinator: AtagDataUpdateCoordinator, atag_id: str) -> None:
        """Initialize an Atag climate device."""

@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.6"]
  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
  "iot_class": "cloud_polling",
  "loggers": ["aussiebb"],
  "requirements": ["pyaussiebb==0.1.4"]
  "requirements": ["pyaussiebb==0.0.15"]
}

@@ -4,12 +4,11 @@ from __future__ import annotations

import asyncio

from autarco import Autarco, AutarcoConnectionError
from autarco import Autarco

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .coordinator import AutarcoDataUpdateCoordinator
@@ -26,12 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AutarcoConfigEntry) -> b
        password=entry.data[CONF_PASSWORD],
        session=async_get_clientsession(hass),
    )

    try:
        account_sites = await client.get_account()
    except AutarcoConnectionError as err:
        await client.close()
        raise ConfigEntryNotReady from err
    account_sites = await client.get_account()

    coordinators: list[AutarcoDataUpdateCoordinator] = [
        AutarcoDataUpdateCoordinator(hass, client, site) for site in account_sites

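The guarded variant of the setup hunk above raises `ConfigEntryNotReady`, which tells Home Assistant to retry entry setup later with backoff rather than failing permanently. A small sketch of that shape; the `client` argument is a hypothetical stand-in for the Autarco API client, and the caught exception stands in for `AutarcoConnectionError`:

```python
# Setup-retry sketch; ConfigEntryNotReady is the real HA exception.
from homeassistant.exceptions import ConfigEntryNotReady


async def fetch_account_or_retry(client):
    try:
        return await client.get_account()
    except ConnectionError as err:  # stand-in for AutarcoConnectionError
        await client.close()  # don't leak the session on failure
        raise ConfigEntryNotReady from err
```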
@@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Mapping
from typing import Any

from autarco import Autarco, AutarcoAuthenticationError, AutarcoConnectionError
@@ -21,12 +20,6 @@ DATA_SCHEMA = vol.Schema(
    }
)

STEP_REAUTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)


class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Autarco."""
@@ -62,40 +55,3 @@ class AutarcoConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
            data_schema=DATA_SCHEMA,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle re-authentication request from Autarco."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle re-authentication confirmation."""
        errors = {}

        reauth_entry = self._get_reauth_entry()
        if user_input is not None:
            client = Autarco(
                email=reauth_entry.data[CONF_EMAIL],
                password=user_input[CONF_PASSWORD],
                session=async_get_clientsession(self.hass),
            )
            try:
                await client.get_account()
            except AutarcoAuthenticationError:
                errors["base"] = "invalid_auth"
            except AutarcoConnectionError:
                errors["base"] = "cannot_connect"
            else:
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates=user_input,
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            description_placeholders={"email": reauth_entry.data[CONF_EMAIL]},
            data_schema=STEP_REAUTH_SCHEMA,
            errors=errors,
        )

@@ -7,7 +7,6 @@ from typing import NamedTuple
from autarco import (
    AccountSite,
    Autarco,
    AutarcoAuthenticationError,
    AutarcoConnectionError,
    Battery,
    Inverter,
@@ -17,7 +16,6 @@ from autarco import (

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, LOGGER, SCAN_INTERVAL
@@ -62,10 +60,8 @@ class AutarcoDataUpdateCoordinator(DataUpdateCoordinator[AutarcoData]):
            inverters = await self.client.get_inverters(self.account_site.public_key)
            if site.has_battery:
                battery = await self.client.get_battery(self.account_site.public_key)
        except AutarcoAuthenticationError as err:
            raise ConfigEntryAuthFailed(err) from err
        except AutarcoConnectionError as err:
            raise UpdateFailed(err) from err
        except AutarcoConnectionError as error:
            raise UpdateFailed(error) from error
        return AutarcoData(
            solar=solar,
            inverters=inverters,

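The coordinator hunk above maps library errors onto the two coordinator outcomes: auth failures become `ConfigEntryAuthFailed` (which starts the reauth flow shown in the config_flow diff), transient connection failures become `UpdateFailed` (entities go unavailable until the next successful refresh). A sketch under those assumptions; the client and the caught exception types are illustrative stand-ins:

```python
# Coordinator error-mapping sketch; the HA exceptions are real.
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import UpdateFailed


async def update_data(client):
    try:
        return await client.get_site()  # hypothetical fetch
    except PermissionError as err:  # stand-in for AutarcoAuthenticationError
        raise ConfigEntryAuthFailed(err) from err
    except ConnectionError as err:  # stand-in for AutarcoConnectionError
        raise UpdateFailed(err) from err
```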
@@ -51,7 +51,7 @@ rules:
      This integration only polls data using a coordinator.
      Since the integration is read-only and poll-only (it only provides
      sensor data), there is no need to implement parallel updates.
  reauthentication-flow: done
  reauthentication-flow: todo
  test-coverage: done

  # Gold

@@ -2,7 +2,7 @@
  "config": {
    "step": {
      "user": {
        "description": "Connect to your Autarco account, to get information about your sites.",
        "description": "Connect to your Autarco account to get information about your solar panels.",
        "data": {
          "email": "[%key:common::config_flow::data::email%]",
          "password": "[%key:common::config_flow::data::password%]"
@@ -11,16 +11,6 @@
          "email": "The email address of your Autarco account.",
          "password": "The password of your Autarco account."
        }
      },
      "reauth_confirm": {
        "title": "[%key:common::config_flow::title::reauth%]",
        "description": "The password for {email} is no longer valid.",
        "data": {
          "password": "[%key:common::config_flow::data::password%]"
        },
        "data_description": {
          "password": "[%key:component::autarco::config::step::user::data_description::password%]"
        }
      }
    },
    "error": {
@@ -28,8 +18,7 @@
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "entity": {

@@ -6,6 +6,7 @@ from abc import ABC, abstractmethod
import asyncio
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from functools import partial
import logging
from typing import Any, Protocol, cast

@@ -50,6 +51,12 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
from homeassistant.helpers import condition
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.deprecation import (
    DeprecatedConstant,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.issue_registry import (
@@ -79,7 +86,12 @@ from homeassistant.helpers.trace import (
    trace_get,
    trace_path,
)
from homeassistant.helpers.trigger import async_initialize_triggers
from homeassistant.helpers.trigger import (
    TriggerActionType,
    TriggerData,
    TriggerInfo,
    async_initialize_triggers,
)
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
from homeassistant.util.dt import parse_datetime
@@ -125,6 +137,20 @@ class IfAction(Protocol):
        """AND all conditions."""


# AutomationActionType, AutomationTriggerData,
# and AutomationTriggerInfo are deprecated as of 2022.9.
# Can be removed in 2025.1
_DEPRECATED_AutomationActionType = DeprecatedConstant(
    TriggerActionType, "TriggerActionType", "2025.1"
)
_DEPRECATED_AutomationTriggerData = DeprecatedConstant(
    TriggerData, "TriggerData", "2025.1"
)
_DEPRECATED_AutomationTriggerInfo = DeprecatedConstant(
    TriggerInfo, "TriggerInfo", "2025.1"
)


@bind_hass
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Return true if specified automation entity_id is on.
@@ -451,7 +477,6 @@ class UnavailableAutomationEntity(BaseAutomationEntity):
        )

    async def async_will_remove_from_hass(self) -> None:
        """Run when entity will be removed from hass."""
        await super().async_will_remove_from_hass()
        async_delete_issue(
            self.hass, DOMAIN, f"{self.entity_id}_validation_{self._validation_status}"
@@ -1194,3 +1219,11 @@ def websocket_config(
            "config": automation.raw_config,
        },
    )


# These can be removed once no deprecated constants are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

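The deprecation shim wired up above relies on PEP 562 module-level `__getattr__`: access to an old alias is intercepted, a warning is logged, and the replacement symbol is returned. A simplified sketch of the mechanism, without the real `homeassistant.helpers.deprecation` helpers:

```python
# PEP 562 deprecation shim sketch (simplified from the diff above).
import logging
from typing import Any

_LOGGER = logging.getLogger(__name__)

TriggerActionType = object  # the replacement symbol

# old name -> (new name, replacement value, removal version)
_DEPRECATED = {
    "AutomationActionType": ("TriggerActionType", TriggerActionType, "2025.1"),
}


def __getattr__(name: str) -> Any:
    """Called only when `name` is not found as a normal module attribute."""
    if name in _DEPRECATED:
        new_name, value, removal = _DEPRECATED[name]
        _LOGGER.warning(
            "%s is deprecated, use %s instead (removal in %s)",
            name, new_name, removal,
        )
        return value
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```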
@@ -29,7 +29,7 @@
  "integration_type": "device",
  "iot_class": "local_push",
  "loggers": ["axis"],
  "requirements": ["axis==64"],
  "requirements": ["axis==63"],
  "ssdp": [
    {
      "manufacturer": "AXIS"

@@ -5,85 +5,36 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

# Pre-import backup to avoid it being imported
# later when the import executor is busy and delaying
# startup
from . import backup  # noqa: F401
from .agent import (
    BackupAgent,
    BackupAgentError,
    BackupAgentPlatformProtocol,
    LocalBackupAgent,
)
from .const import DATA_MANAGER, DOMAIN
from .const import DATA_MANAGER, DOMAIN, LOGGER
from .http import async_register_http_views
from .manager import (
    BackupManager,
    BackupPlatformProtocol,
    BackupReaderWriter,
    CoreBackupReaderWriter,
    CreateBackupEvent,
    ManagerBackup,
    NewBackup,
    WrittenBackup,
)
from .models import AddonInfo, AgentBackup, Folder
from .manager import BackupManager
from .websocket import async_register_websocket_handlers

__all__ = [
    "AddonInfo",
    "AgentBackup",
    "ManagerBackup",
    "BackupAgent",
    "BackupAgentError",
    "BackupAgentPlatformProtocol",
    "BackupPlatformProtocol",
    "BackupReaderWriter",
    "CreateBackupEvent",
    "Folder",
    "LocalBackupAgent",
    "NewBackup",
    "WrittenBackup",
]

CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Backup integration."""
    with_hassio = is_hassio(hass)

    reader_writer: BackupReaderWriter
    if not with_hassio:
        reader_writer = CoreBackupReaderWriter(hass)
    else:
        # pylint: disable-next=import-outside-toplevel, hass-component-root-import
        from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter

        reader_writer = SupervisorBackupReaderWriter(hass)

    backup_manager = BackupManager(hass, reader_writer)
    backup_manager = BackupManager(hass)
    hass.data[DATA_MANAGER] = backup_manager
    await backup_manager.async_setup()

    with_hassio = is_hassio(hass)

    async_register_websocket_handlers(hass, with_hassio)

    if with_hassio:
        if DOMAIN in config:
            LOGGER.error(
                "The backup integration is not supported on this installation method, "
                "please remove it from your configuration"
            )
        return True

    async def async_handle_create_service(call: ServiceCall) -> None:
        """Service handler for creating backups."""
        agent_id = list(backup_manager.local_backup_agents)[0]
        await backup_manager.async_create_backup(
            agent_ids=[agent_id],
            include_addons=None,
            include_all_addons=False,
            include_database=True,
            include_folders=None,
            include_homeassistant=True,
            name=None,
            password=None,
        )
        await backup_manager.async_create_backup()

    if not with_hassio:
        hass.services.async_register(DOMAIN, "create", async_handle_create_service)
    hass.services.async_register(DOMAIN, "create", async_handle_create_service)

    async_register_http_views(hass)

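The setup hunk above dispatches between two reader/writer implementations: Core and Container installs get the local tar-based one, while Supervisor installs defer to the hassio component (imported lazily so other installs never load it). A sketch of that dispatch; the class names and import paths follow the diff but are otherwise assumptions:

```python
# Reader/writer dispatch sketch; import paths follow the diff above.
def choose_reader_writer(hass, with_hassio: bool):
    if not with_hassio:
        # Local tar-based implementation for Core/Container installs.
        from homeassistant.components.backup.manager import CoreBackupReaderWriter

        return CoreBackupReaderWriter(hass)

    # Deferred import: only Supervisor installs ever load hassio.backup.
    from homeassistant.components.hassio.backup import SupervisorBackupReaderWriter

    return SupervisorBackupReaderWriter(hass)
```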
@@ -1,121 +0,0 @@
"""Backup agents for the Backup integration."""

from __future__ import annotations

import abc
from collections.abc import AsyncIterator, Callable, Coroutine
from pathlib import Path
from typing import Any, Protocol

from propcache import cached_property

from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError

from .models import AgentBackup


class BackupAgentError(HomeAssistantError):
    """Base class for backup agent errors."""


class BackupAgentUnreachableError(BackupAgentError):
    """Raised when the agent can't reach its API."""

    _message = "The backup agent is unreachable."


class BackupAgent(abc.ABC):
    """Backup agent interface."""

    domain: str
    name: str

    @cached_property
    def agent_id(self) -> str:
        """Return the agent_id."""
        return f"{self.domain}.{self.name}"

    @abc.abstractmethod
    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        :return: An async iterator that yields bytes.
        """

    @abc.abstractmethod
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup.

        :param open_stream: A function returning an async iterator that yields bytes.
        :param backup: Metadata about the backup that should be uploaded.
        """

    @abc.abstractmethod
    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file.

        :param backup_id: The ID of the backup that was returned in async_list_backups.
        """

    @abc.abstractmethod
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""

    @abc.abstractmethod
    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""


class LocalBackupAgent(BackupAgent):
    """Local backup agent."""

    @abc.abstractmethod
    def get_backup_path(self, backup_id: str) -> Path:
        """Return the local path to a backup.

        The method should return the path to the backup file with the specified id.
        """


class BackupAgentPlatformProtocol(Protocol):
    """Define the format of backup platforms which implement backup agents."""

    async def async_get_backup_agents(
        self,
        hass: HomeAssistant,
        **kwargs: Any,
    ) -> list[BackupAgent]:
        """Return a list of backup agents."""

    @callback
    def async_register_backup_agents_listener(
        self,
        hass: HomeAssistant,
        *,
        listener: Callable[[], None],
        **kwargs: Any,
    ) -> Callable[[], None]:
        """Register a listener to be called when agents are added or removed.

        :return: A function to unregister the listener.
        """
@@ -1,125 +0,0 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
import json
from pathlib import Path
from tarfile import TarError
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.hassio import is_hassio

from .agent import BackupAgent, LocalBackupAgent
from .const import DOMAIN, LOGGER
from .models import AgentBackup
from .util import read_backup


async def async_get_backup_agents(
    hass: HomeAssistant,
    **kwargs: Any,
) -> list[BackupAgent]:
    """Return the local backup agent."""
    if is_hassio(hass):
        return []
    return [CoreLocalBackupAgent(hass)]


class CoreLocalBackupAgent(LocalBackupAgent):
    """Local backup agent for Core and Container installations."""

    domain = DOMAIN
    name = "local"

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the backup agent."""
        super().__init__()
        self._hass = hass
        self._backup_dir = Path(hass.config.path("backups"))
        self._backups: dict[str, AgentBackup] = {}
        self._loaded_backups = False

    async def _load_backups(self) -> None:
        """Load data of stored backup files."""
        backups = await self._hass.async_add_executor_job(self._read_backups)
        LOGGER.debug("Loaded %s local backups", len(backups))
        self._backups = backups
        self._loaded_backups = True

    def _read_backups(self) -> dict[str, AgentBackup]:
        """Read backups from disk."""
        backups: dict[str, AgentBackup] = {}
        for backup_path in self._backup_dir.glob("*.tar"):
            try:
                backup = read_backup(backup_path)
                backups[backup.backup_id] = backup
            except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
                LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
        return backups

    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        raise NotImplementedError

    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""
        self._backups[backup.backup_id] = backup

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        if not self._loaded_backups:
            await self._load_backups()
        return list(self._backups.values())

    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""
        if not self._loaded_backups:
            await self._load_backups()

        if not (backup := self._backups.get(backup_id)):
            return None

        backup_path = self.get_backup_path(backup_id)
        if not await self._hass.async_add_executor_job(backup_path.exists):
            LOGGER.debug(
                (
                    "Removing tracked backup (%s) that does not exist on the expected"
                    " path %s"
                ),
                backup.backup_id,
                backup_path,
            )
            self._backups.pop(backup_id)
            return None

        return backup

    def get_backup_path(self, backup_id: str) -> Path:
        """Return the local path to a backup."""
        return self._backup_dir / f"{backup_id}.tar"

    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup file."""
        if await self.async_get_backup(backup_id) is None:
            return

        backup_path = self.get_backup_path(backup_id)
        await self._hass.async_add_executor_job(backup_path.unlink, True)
        LOGGER.debug("Deleted backup located at %s", backup_path)
        self._backups.pop(backup_id)
@@ -1,473 +0,0 @@
"""Provide persistent configuration for the backup integration."""

from __future__ import annotations

import asyncio
from collections.abc import Callable
from dataclasses import dataclass, field, replace
from datetime import datetime, timedelta
from enum import StrEnum
from typing import TYPE_CHECKING, Self, TypedDict

from cronsim import CronSim

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.event import async_call_later, async_track_point_in_time
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

from .const import LOGGER
from .models import Folder

if TYPE_CHECKING:
    from .manager import BackupManager, ManagerBackup

# The time of the automatic backup event should be compatible with
# the time of the recorder's nightly job which runs at 04:12.
# Run the backup at 04:45.
CRON_PATTERN_DAILY = "45 4 * * *"
CRON_PATTERN_WEEKLY = "45 4 * * {}"


class StoredBackupConfig(TypedDict):
    """Represent the stored backup config."""

    create_backup: StoredCreateBackupConfig
    last_attempted_automatic_backup: str | None
    last_completed_automatic_backup: str | None
    retention: StoredRetentionConfig
    schedule: StoredBackupSchedule


@dataclass(kw_only=True)
class BackupConfigData:
    """Represent loaded backup config data."""

    create_backup: CreateBackupConfig
    last_attempted_automatic_backup: datetime | None = None
    last_completed_automatic_backup: datetime | None = None
    retention: RetentionConfig
    schedule: BackupSchedule

    @classmethod
    def from_dict(cls, data: StoredBackupConfig) -> Self:
        """Initialize backup config data from a dict."""
        include_folders_data = data["create_backup"]["include_folders"]
        if include_folders_data:
            include_folders = [Folder(folder) for folder in include_folders_data]
        else:
            include_folders = None
        retention = data["retention"]

        if last_attempted_str := data["last_attempted_automatic_backup"]:
            last_attempted = dt_util.parse_datetime(last_attempted_str)
        else:
            last_attempted = None

        if last_attempted_str := data["last_completed_automatic_backup"]:
            last_completed = dt_util.parse_datetime(last_attempted_str)
        else:
            last_completed = None

        return cls(
            create_backup=CreateBackupConfig(
                agent_ids=data["create_backup"]["agent_ids"],
                include_addons=data["create_backup"]["include_addons"],
                include_all_addons=data["create_backup"]["include_all_addons"],
                include_database=data["create_backup"]["include_database"],
                include_folders=include_folders,
                name=data["create_backup"]["name"],
                password=data["create_backup"]["password"],
            ),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            retention=RetentionConfig(
                copies=retention["copies"],
                days=retention["days"],
            ),
            schedule=BackupSchedule(state=ScheduleState(data["schedule"]["state"])),
        )

    def to_dict(self) -> StoredBackupConfig:
        """Convert backup config data to a dict."""
        if self.last_attempted_automatic_backup:
            last_attempted = self.last_attempted_automatic_backup.isoformat()
        else:
            last_attempted = None

        if self.last_completed_automatic_backup:
            last_completed = self.last_completed_automatic_backup.isoformat()
        else:
            last_completed = None

        return StoredBackupConfig(
            create_backup=self.create_backup.to_dict(),
            last_attempted_automatic_backup=last_attempted,
            last_completed_automatic_backup=last_completed,
            retention=self.retention.to_dict(),
            schedule=self.schedule.to_dict(),
        )


class BackupConfig:
    """Handle backup config."""

    def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
        """Initialize backup config."""
        self.data = BackupConfigData(
            create_backup=CreateBackupConfig(),
            retention=RetentionConfig(),
            schedule=BackupSchedule(),
        )
        self._manager = manager

    def load(self, stored_config: StoredBackupConfig) -> None:
        """Load config."""
        self.data = BackupConfigData.from_dict(stored_config)
        self.data.schedule.apply(self._manager)

    async def update(
        self,
        *,
        create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
        retention: RetentionParametersDict | UndefinedType = UNDEFINED,
        schedule: ScheduleState | UndefinedType = UNDEFINED,
    ) -> None:
        """Update config."""
        if create_backup is not UNDEFINED:
            self.data.create_backup = replace(self.data.create_backup, **create_backup)
        if retention is not UNDEFINED:
            new_retention = RetentionConfig(**retention)
            if new_retention != self.data.retention:
                self.data.retention = new_retention
                self.data.retention.apply(self._manager)
        if schedule is not UNDEFINED:
            new_schedule = BackupSchedule(state=schedule)
            if new_schedule.to_dict() != self.data.schedule.to_dict():
                self.data.schedule = new_schedule
                self.data.schedule.apply(self._manager)

        self._manager.store.save()


@dataclass(kw_only=True)
class RetentionConfig:
    """Represent the backup retention configuration."""

    copies: int | None = None
    days: int | None = None

    def apply(self, manager: BackupManager) -> None:
        """Apply backup retention configuration."""
        if self.days is not None:
            self._schedule_next(manager)
        else:
            self._unschedule_next(manager)

    def to_dict(self) -> StoredRetentionConfig:
        """Convert backup retention configuration to a dict."""
        return StoredRetentionConfig(
            copies=self.copies,
            days=self.days,
        )

    @callback
    def _schedule_next(
        self,
        manager: BackupManager,
    ) -> None:
        """Schedule the next delete after days."""
        self._unschedule_next(manager)

        async def _delete_backups(now: datetime) -> None:
            """Delete backups older than days."""
            self._schedule_next(manager)

            def _backups_filter(
                backups: dict[str, ManagerBackup],
            ) -> dict[str, ManagerBackup]:
                """Return backups older than days to delete."""
                # we need to check here since we await before
                # this filter is applied
                if self.days is None:
                    return {}
                now = dt_util.utcnow()
                return {
                    backup_id: backup
                    for backup_id, backup in backups.items()
                    if dt_util.parse_datetime(backup.date, raise_on_error=True)
                    + timedelta(days=self.days)
                    < now
                }

            await _delete_filtered_backups(manager, _backups_filter)

        manager.remove_next_delete_event = async_call_later(
            manager.hass, timedelta(days=1), _delete_backups
        )

    @callback
    def _unschedule_next(self, manager: BackupManager) -> None:
        """Unschedule the next delete after days."""
        if (remove_next_event := manager.remove_next_delete_event) is not None:
            remove_next_event()
            manager.remove_next_delete_event = None


class StoredRetentionConfig(TypedDict):
    """Represent the stored backup retention configuration."""

    copies: int | None
    days: int | None


class RetentionParametersDict(TypedDict, total=False):
    """Represent the parameters for retention."""

    copies: int | None
    days: int | None


class StoredBackupSchedule(TypedDict):
    """Represent the stored backup schedule configuration."""

    state: ScheduleState


class ScheduleState(StrEnum):
    """Represent the schedule state."""

    NEVER = "never"
    DAILY = "daily"
    MONDAY = "mon"
    TUESDAY = "tue"
    WEDNESDAY = "wed"
    THURSDAY = "thu"
    FRIDAY = "fri"
    SATURDAY = "sat"
    SUNDAY = "sun"


@dataclass(kw_only=True)
class BackupSchedule:
    """Represent the backup schedule."""

    state: ScheduleState = ScheduleState.NEVER
    cron_event: CronSim | None = field(init=False, default=None)

    @callback
    def apply(
        self,
        manager: BackupManager,
    ) -> None:
        """Apply a new schedule.

        There are only three possible state types: never, daily, or weekly.
        """
        if self.state is ScheduleState.NEVER:
            self._unschedule_next(manager)
            return

        if self.state is ScheduleState.DAILY:
            self._schedule_next(CRON_PATTERN_DAILY, manager)
        else:
            self._schedule_next(
                CRON_PATTERN_WEEKLY.format(self.state.value),
                manager,
            )

    @callback
    def _schedule_next(
        self,
        cron_pattern: str,
        manager: BackupManager,
    ) -> None:
        """Schedule the next backup."""
        self._unschedule_next(manager)
        now = dt_util.now()
        if (cron_event := self.cron_event) is None:
            seed_time = manager.config.data.last_completed_automatic_backup or now
            cron_event = self.cron_event = CronSim(cron_pattern, seed_time)
        next_time = next(cron_event)

        if next_time < now:
            # schedule a backup at next daily time once
            # if we missed the last scheduled backup
            cron_event = CronSim(CRON_PATTERN_DAILY, now)
            next_time = next(cron_event)
            # reseed the cron event attribute
            # add a day to the next time to avoid scheduling at the same time again
            self.cron_event = CronSim(cron_pattern, now + timedelta(days=1))

        async def _create_backup(now: datetime) -> None:
            """Create backup."""
            manager.remove_next_backup_event = None
            config_data = manager.config.data
            self._schedule_next(cron_pattern, manager)

            # create the backup
            try:
                await manager.async_create_backup(
                    agent_ids=config_data.create_backup.agent_ids,
                    include_addons=config_data.create_backup.include_addons,
                    include_all_addons=config_data.create_backup.include_all_addons,
                    include_database=config_data.create_backup.include_database,
                    include_folders=config_data.create_backup.include_folders,
                    include_homeassistant=True,  # always include HA
                    name=config_data.create_backup.name,
                    password=config_data.create_backup.password,
                    with_automatic_settings=True,
                )
            except Exception:  # noqa: BLE001
                # another more specific exception will be added
                # and handled in the future
                LOGGER.exception("Unexpected error creating automatic backup")

        manager.remove_next_backup_event = async_track_point_in_time(
            manager.hass, _create_backup, next_time
        )

    def to_dict(self) -> StoredBackupSchedule:
        """Convert backup schedule to a dict."""
        return StoredBackupSchedule(state=self.state)

    @callback
    def _unschedule_next(self, manager: BackupManager) -> None:
        """Unschedule the next backup."""
        if (remove_next_event := manager.remove_next_backup_event) is not None:
            remove_next_event()
            manager.remove_next_backup_event = None


@dataclass(kw_only=True)
class CreateBackupConfig:
    """Represent the config for async_create_backup."""

    agent_ids: list[str] = field(default_factory=list)
    include_addons: list[str] | None = None
    include_all_addons: bool = False
    include_database: bool = True
    include_folders: list[Folder] | None = None
    name: str | None = None
    password: str | None = None

    def to_dict(self) -> StoredCreateBackupConfig:
        """Convert create backup config to a dict."""
        return {
            "agent_ids": self.agent_ids,
            "include_addons": self.include_addons,
            "include_all_addons": self.include_all_addons,
            "include_database": self.include_database,
            "include_folders": self.include_folders,
            "name": self.name,
            "password": self.password,
        }


class StoredCreateBackupConfig(TypedDict):
    """Represent the stored config for async_create_backup."""

    agent_ids: list[str]
    include_addons: list[str] | None
    include_all_addons: bool
    include_database: bool
    include_folders: list[Folder] | None
    name: str | None
    password: str | None


class CreateBackupParametersDict(TypedDict, total=False):
    """Represent the parameters for async_create_backup."""

    agent_ids: list[str]
    include_addons: list[str] | None
    include_all_addons: bool
    include_database: bool
    include_folders: list[Folder] | None
    name: str | None
    password: str | None


async def _delete_filtered_backups(
    manager: BackupManager,
    backup_filter: Callable[[dict[str, ManagerBackup]], dict[str, ManagerBackup]],
) -> None:
    """Delete backups parsed with a filter.

    :param manager: The backup manager.
    :param backup_filter: A filter that should return the backups to delete.
    """
    backups, get_agent_errors = await manager.async_get_backups()
    if get_agent_errors:
        LOGGER.debug(
            "Error getting backups; continuing anyway: %s",
            get_agent_errors,
        )

    # only delete backups that are created with the saved automatic settings
    backups = {
        backup_id: backup
        for backup_id, backup in backups.items()
        if backup.with_automatic_settings
    }

    LOGGER.debug("Total automatic backups: %s", backups)

    filtered_backups = backup_filter(backups)

    if not filtered_backups:
        return

    # always delete oldest backup first
    filtered_backups = dict(
        sorted(
            filtered_backups.items(),
            key=lambda backup_item: backup_item[1].date,
        )
    )

    if len(filtered_backups) >= len(backups):
        # Never delete the last backup.
        last_backup = filtered_backups.popitem()
        LOGGER.debug("Keeping the last backup: %s", last_backup)

    LOGGER.debug("Backups to delete: %s", filtered_backups)

    if not filtered_backups:
        return

    backup_ids = list(filtered_backups)
    delete_results = await asyncio.gather(
        *(manager.async_delete_backup(backup_id) for backup_id in filtered_backups)
    )
    agent_errors = {
        backup_id: error
        for backup_id, error in zip(backup_ids, delete_results, strict=True)
        if error
    }
    if agent_errors:
        LOGGER.error(
            "Error deleting old copies: %s",
            agent_errors,
        )


async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
    """Delete backups exceeding the configured retention count."""

    def _backups_filter(
        backups: dict[str, ManagerBackup],
    ) -> dict[str, ManagerBackup]:
        """Return oldest backups more numerous than copies to delete."""
        # we need to check here since we await before
        # this filter is applied
        if manager.config.data.retention.copies is None:
            return {}
        return dict(
            sorted(
                backups.items(),
                key=lambda backup_item: backup_item[1].date,
            )[: max(len(backups) - manager.config.data.retention.copies, 0)]
        )

    await _delete_filtered_backups(manager, _backups_filter)
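The scheduling logic above seeds a `CronSim` iterator with the last completed automatic backup (or now), takes the next fire time, and falls back to the daily pattern when that time is already in the past. A minimal, runnable sketch of just that date arithmetic:

```python
# CronSim scheduling sketch, distilled from BackupSchedule._schedule_next above.
from datetime import datetime

from cronsim import CronSim

CRON_PATTERN_DAILY = "45 4 * * *"


def next_backup_time(last_completed: datetime | None, now: datetime) -> datetime:
    """Return the next automatic-backup fire time."""
    # Seed from the last completed run so a restart doesn't skip a cycle.
    cron_event = CronSim(CRON_PATTERN_DAILY, last_completed or now)
    next_time = next(cron_event)
    if next_time < now:
        # We missed a scheduled run; fire at the next daily slot from now.
        next_time = next(CronSim(CRON_PATTERN_DAILY, now))
    return next_time


now = datetime(2024, 12, 1, 12, 0)
print(next_backup_time(datetime(2024, 11, 29, 4, 45), now))  # 2024-12-02 04:45
```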
@@ -10,7 +10,6 @@ from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
    from .manager import BackupManager

BUF_SIZE = 2**20 * 4  # 4MB
DOMAIN = "backup"
DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
LOGGER = getLogger(__package__)
@@ -23,12 +22,6 @@ EXCLUDE_FROM_BACKUP = [
    "*.log.*",
    "*.log",
    "backups/*.tar",
    "tmp_backups/*.tar",
    "OZW_Log.txt",
    "tts/*",
]

EXCLUDE_DATABASE_FROM_BACKUP = [
    "home-assistant_v2.db",
    "home-assistant_v2.db-wal",
]

@@ -8,11 +8,10 @@ from typing import cast

from aiohttp import BodyPartReader
from aiohttp.hdrs import CONTENT_DISPOSITION
from aiohttp.web import FileResponse, Request, Response, StreamResponse
from aiohttp.web import FileResponse, Request, Response

from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import slugify

from .const import DATA_MANAGER
@@ -28,47 +27,30 @@ def async_register_http_views(hass: HomeAssistant) -> None:
class DownloadBackupView(HomeAssistantView):
    """Generate backup view."""

    url = "/api/backup/download/{backup_id}"
    url = "/api/backup/download/{slug}"
    name = "api:backup:download"

    async def get(
        self,
        request: Request,
        backup_id: str,
    ) -> StreamResponse | FileResponse | Response:
        slug: str,
    ) -> FileResponse | Response:
        """Download a backup file."""
        if not request["hass_user"].is_admin:
            return Response(status=HTTPStatus.UNAUTHORIZED)
        try:
            agent_id = request.query.getone("agent_id")
        except KeyError:
            return Response(status=HTTPStatus.BAD_REQUEST)

        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        if agent_id not in manager.backup_agents:
            return Response(status=HTTPStatus.BAD_REQUEST)
        agent = manager.backup_agents[agent_id]
        backup = await agent.async_get_backup(backup_id)
        backup = await manager.async_get_backup(slug=slug)

        # We don't need to check if the path exists, aiohttp.FileResponse will handle
        # that
        if backup is None:
        if backup is None or not backup.path.exists():
            return Response(status=HTTPStatus.NOT_FOUND)

        headers = {
            CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
        }
        if agent_id in manager.local_backup_agents:
            local_agent = manager.local_backup_agents[agent_id]
            path = local_agent.get_backup_path(backup_id)
            return FileResponse(path=path.as_posix(), headers=headers)

        stream = await agent.async_download_backup(backup_id)
        response = StreamResponse(status=HTTPStatus.OK, headers=headers)
        await response.prepare(request)
        async for chunk in stream:
            await response.write(chunk)
        return response
        return FileResponse(
            path=backup.path.as_posix(),
            headers={
                CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar"
            },
        )


class UploadBackupView(HomeAssistantView):
@@ -80,24 +62,15 @@ class UploadBackupView(HomeAssistantView):
    @require_admin
    async def post(self, request: Request) -> Response:
        """Upload a backup file."""
        try:
            agent_ids = request.query.getall("agent_id")
        except KeyError:
            return Response(status=HTTPStatus.BAD_REQUEST)
        manager = request.app[KEY_HASS].data[DATA_MANAGER]
        reader = await request.multipart()
        contents = cast(BodyPartReader, await reader.next())

        try:
            await manager.async_receive_backup(contents=contents, agent_ids=agent_ids)
            await manager.async_receive_backup(contents=contents)
        except OSError as err:
            return Response(
                body=f"Can't write backup file: {err}",
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )
        except HomeAssistantError as err:
            return Response(
                body=f"Can't upload backup file: {err}",
                body=f"Can't write backup file {err}",
                status=HTTPStatus.INTERNAL_SERVER_ERROR,
            )
        except asyncio.CancelledError:
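One side of the download view above serves remote agents by copying their async byte iterator into an aiohttp `StreamResponse` chunk by chunk, rather than handing a local path to `FileResponse`. A small sketch of that streaming path, using only the standard aiohttp API:

```python
# Streamed download sketch; `stream` is an AsyncIterator[bytes] from a backup agent.
from http import HTTPStatus

from aiohttp import web


async def stream_backup(
    request: web.Request, stream, headers: dict[str, str]
) -> web.StreamResponse:
    response = web.StreamResponse(status=HTTPStatus.OK, headers=headers)
    await response.prepare(request)  # send status line and headers first
    async for chunk in stream:
        await response.write(chunk)  # backpressure-aware write
    return response
```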
[File diff suppressed because it is too large]

@@ -1,12 +1,11 @@
{
  "domain": "backup",
  "name": "Backup",
  "after_dependencies": ["hassio"],
  "codeowners": ["@home-assistant/core"],
  "dependencies": ["http", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/backup",
  "integration_type": "system",
  "iot_class": "calculated",
  "quality_scale": "internal",
  "requirements": ["cronsim==2.6", "securetar==2024.11.0"]
  "requirements": ["securetar==2024.11.0"]
}

@@ -1,69 +0,0 @@
"""Models for the backup integration."""

from __future__ import annotations

from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
    """Addon information."""

    name: str
    slug: str
    version: str


class Folder(StrEnum):
    """Folder type."""

    SHARE = "share"
    ADDONS = "addons/local"
    SSL = "ssl"
    MEDIA = "media"


@dataclass(frozen=True, kw_only=True)
class AgentBackup:
    """Base backup class."""

    addons: list[AddonInfo]
    backup_id: str
    date: str
    database_included: bool
    extra_metadata: dict[str, bool | str]
    folders: list[Folder]
    homeassistant_included: bool
    homeassistant_version: str | None  # None if homeassistant_included is False
    name: str
    protected: bool
    size: int

    def as_dict(self) -> dict:
        """Return a dict representation of this backup."""
        return asdict(self)

    def as_frontend_json(self) -> dict:
        """Return a dict representation of this backup for sending to frontend."""
        return {
            key: val for key, val in asdict(self).items() if key != "extra_metadata"
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> Self:
        """Create an instance from a JSON serialization."""
        return cls(
            addons=[AddonInfo(**addon) for addon in data["addons"]],
            backup_id=data["backup_id"],
            date=data["date"],
            database_included=data["database_included"],
            extra_metadata=data["extra_metadata"],
            folders=[Folder(folder) for folder in data["folders"]],
            homeassistant_included=data["homeassistant_included"],
            homeassistant_version=data["homeassistant_version"],
            name=data["name"],
            protected=data["protected"],
            size=data["size"],
        )
@@ -1,52 +0,0 @@
"""Store backup configuration."""

from __future__ import annotations

from typing import TYPE_CHECKING, TypedDict

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.storage import Store

from .const import DOMAIN

if TYPE_CHECKING:
    from .config import StoredBackupConfig
    from .manager import BackupManager, StoredKnownBackup

STORE_DELAY_SAVE = 30
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1


class StoredBackupData(TypedDict):
    """Represent the stored backup config."""

    backups: list[StoredKnownBackup]
    config: StoredBackupConfig


class BackupStore:
    """Store backup config."""

    def __init__(self, hass: HomeAssistant, manager: BackupManager) -> None:
        """Initialize the backup manager."""
        self._hass = hass
        self._manager = manager
        self._store: Store[StoredBackupData] = Store(hass, STORAGE_VERSION, STORAGE_KEY)

    async def load(self) -> StoredBackupData | None:
        """Load the store."""
        return await self._store.async_load()

    @callback
    def save(self) -> None:
        """Save config."""
        self._store.async_delay_save(self._data_to_save, STORE_DELAY_SAVE)

    @callback
    def _data_to_save(self) -> StoredBackupData:
        """Return data to save."""
        return {
            "backups": self._manager.known_backups.to_list(),
            "config": self._manager.config.data.to_dict(),
        }
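The `save` callback above is debounced: `Store.async_delay_save` takes a callback that produces the data, and repeated calls within the delay window coalesce into a single disk write. A minimal sketch of that usage, with the 30-second delay taken from the diff:

```python
# Debounced persistence sketch using the real HA Store helper.
from homeassistant.helpers.storage import Store

STORE_DELAY_SAVE = 30  # seconds, as in the diff above


def schedule_save(store: Store, data_to_save) -> None:
    # data_to_save is a zero-argument callback returning the dict to persist;
    # calling schedule_save repeatedly within the window writes only once.
    store.async_delay_save(data_to_save, STORE_DELAY_SAVE)
```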
@@ -1,14 +1,4 @@
{
  "issues": {
    "automatic_backup_failed_create": {
      "title": "Automatic backup could not be created",
      "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    },
    "automatic_backup_failed_upload_agents": {
      "title": "Automatic backup could not be uploaded to agents",
      "description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
    }
  },
  "services": {
    "create": {
      "name": "Create backup",

@@ -1,148 +0,0 @@
"""Local backup support for Core and Container installations."""

from __future__ import annotations

import asyncio
from pathlib import Path
from queue import SimpleQueue
import tarfile
from typing import cast

import aiohttp
from securetar import SecureTarFile

from homeassistant.backup_restore import password_to_key
from homeassistant.core import HomeAssistant
from homeassistant.util.json import JsonObjectType, json_loads_object

from .const import BUF_SIZE, LOGGER
from .models import AddonInfo, AgentBackup, Folder


def make_backup_dir(path: Path) -> None:
    """Create a backup directory if it does not exist."""
    path.mkdir(exist_ok=True)


def read_backup(backup_path: Path) -> AgentBackup:
    """Read a backup from disk."""

    with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
        if not (data_file := backup_file.extractfile("./backup.json")):
            raise KeyError("backup.json not found in tar file")
        data = json_loads_object(data_file.read())
        addons = [
            AddonInfo(
                name=cast(str, addon["name"]),
                slug=cast(str, addon["slug"]),
                version=cast(str, addon["version"]),
            )
            for addon in cast(list[JsonObjectType], data.get("addons", []))
        ]

        folders = [
            Folder(folder)
            for folder in cast(list[str], data.get("folders", []))
            if folder != "homeassistant"
        ]

        homeassistant_included = False
        homeassistant_version: str | None = None
        database_included = False
        if (
            homeassistant := cast(JsonObjectType, data.get("homeassistant"))
        ) and "version" in homeassistant:
            homeassistant_included = True
            homeassistant_version = cast(str, homeassistant["version"])
            database_included = not cast(
                bool, homeassistant.get("exclude_database", False)
            )

        return AgentBackup(
            addons=addons,
            backup_id=cast(str, data["slug"]),
            database_included=database_included,
            date=cast(str, data["date"]),
            extra_metadata=cast(dict[str, bool | str], data.get("extra", {})),
            folders=folders,
            homeassistant_included=homeassistant_included,
            homeassistant_version=homeassistant_version,
            name=cast(str, data["name"]),
            protected=cast(bool, data.get("protected", False)),
            size=backup_path.stat().st_size,
        )


def validate_password(path: Path, password: str | None) -> bool:
    """Validate the password."""
    with tarfile.open(path, "r:", bufsize=BUF_SIZE) as backup_file:
        compressed = False
        ha_tar_name = "homeassistant.tar"
        try:
            ha_tar = backup_file.extractfile(ha_tar_name)
        except KeyError:
            compressed = True
            ha_tar_name = "homeassistant.tar.gz"
            try:
                ha_tar = backup_file.extractfile(ha_tar_name)
            except KeyError:
                LOGGER.error("No homeassistant.tar or homeassistant.tar.gz found")
                return False
        try:
            with SecureTarFile(
                path,  # Not used
                gzip=compressed,
                key=password_to_key(password) if password is not None else None,
                mode="r",
                fileobj=ha_tar,
            ):
                # If we can read the tar file, the password is correct
                return True
        except tarfile.ReadError:
            LOGGER.debug("Invalid password")
            return False
        except Exception:  # noqa: BLE001
            LOGGER.exception("Unexpected error validating password")
            return False


async def receive_file(
    hass: HomeAssistant, contents: aiohttp.BodyPartReader, path: Path
) -> None:
    """Receive a file from a stream and write it to a file."""
    queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = SimpleQueue()

    def _sync_queue_consumer() -> None:
        with path.open("wb") as file_handle:
            while True:
                if (_chunk_future := queue.get()) is None:
                    break
                _chunk, _future = _chunk_future
                if _future is not None:
                    hass.loop.call_soon_threadsafe(_future.set_result, None)
                file_handle.write(_chunk)

    fut: asyncio.Future[None] | None = None
    try:
        fut = hass.async_add_executor_job(_sync_queue_consumer)
        megabytes_sending = 0
        while chunk := await contents.read_chunk(BUF_SIZE):
            megabytes_sending += 1
            if megabytes_sending % 5 != 0:
                queue.put_nowait((chunk, None))
                continue

            chunk_future = hass.loop.create_future()
            queue.put_nowait((chunk, chunk_future))
            await asyncio.wait(
                (fut, chunk_future),
                return_when=asyncio.FIRST_COMPLETED,
            )
            if fut.done():
                # The executor job failed
                break

        queue.put_nowait(None)  # terminate queue consumer
    finally:
        if fut is not None:
            await fut
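`receive_file` above applies backpressure by attaching a future to every fifth chunk: the network producer awaits that future, and the blocking disk writer in the executor resolves it when it has drained that far, so the queue cannot grow without bound. A runnable, self-contained sketch of the same producer/consumer handshake (stripped of Home Assistant and aiohttp specifics):

```python
# Backpressure handshake sketch, modeled on receive_file above.
import asyncio
from queue import SimpleQueue


def consume(loop: asyncio.AbstractEventLoop, queue: SimpleQueue, out: list) -> None:
    """Blocking consumer (runs in an executor thread in the real code)."""
    while (item := queue.get()) is not None:  # None is the end-of-stream sentinel
        chunk, fut = item
        if fut is not None:
            # Tell the producer we have drained up to this chunk.
            loop.call_soon_threadsafe(fut.set_result, None)
        out.append(chunk)


async def produce(queue: SimpleQueue, chunks: list[bytes]) -> None:
    loop = asyncio.get_running_loop()
    for i, chunk in enumerate(chunks, 1):
        if i % 5:
            queue.put_nowait((chunk, None))  # fire and forget
            continue
        fut = loop.create_future()
        queue.put_nowait((chunk, fut))
        await fut  # wait until the consumer has caught up
    queue.put_nowait(None)


async def main() -> None:
    queue: SimpleQueue = SimpleQueue()
    out: list[bytes] = []
    loop = asyncio.get_running_loop()
    writer = loop.run_in_executor(None, consume, loop, queue, out)
    await produce(queue, [b"x"] * 12)
    await writer
    assert len(out) == 12


asyncio.run(main())
```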
@@ -7,31 +7,22 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .config import ScheduleState
|
||||
from .const import DATA_MANAGER, LOGGER
|
||||
from .manager import IncorrectPasswordError, ManagerStateEvent
|
||||
from .models import Folder
|
||||
|
||||
|
||||
@callback
|
||||
def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
|
||||
"""Register websocket commands."""
|
||||
websocket_api.async_register_command(hass, backup_agents_info)
|
||||
|
||||
if with_hassio:
|
||||
websocket_api.async_register_command(hass, handle_backup_end)
|
||||
websocket_api.async_register_command(hass, handle_backup_start)
|
||||
return
|
||||
|
||||
websocket_api.async_register_command(hass, handle_details)
|
||||
websocket_api.async_register_command(hass, handle_info)
|
||||
websocket_api.async_register_command(hass, handle_create)
|
||||
websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
|
||||
websocket_api.async_register_command(hass, handle_delete)
|
||||
websocket_api.async_register_command(hass, handle_remove)
|
||||
websocket_api.async_register_command(hass, handle_restore)
|
||||
websocket_api.async_register_command(hass, handle_subscribe_events)
|
||||
|
||||
websocket_api.async_register_command(hass, handle_config_info)
|
||||
websocket_api.async_register_command(hass, handle_config_update)
|
||||
|
||||
|
||||
@websocket_api.require_admin
@@ -44,16 +35,12 @@ async def handle_info(
) -> None:
    """List all stored backups."""
    manager = hass.data[DATA_MANAGER]
    backups, agent_errors = await manager.async_get_backups()
    backups = await manager.async_get_backups()
    connection.send_result(
        msg["id"],
        {
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backups": [backup.as_frontend_json() for backup in backups.values()],
            "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
            "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
            "backups": list(backups.values()),
            "backing_up": manager.backing_up,
        },
    )

@@ -62,7 +49,7 @@ async def handle_info(
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/details",
        vol.Required("backup_id"): str,
        vol.Required("slug"): str,
    }
)
@websocket_api.async_response
@@ -71,17 +58,12 @@ async def handle_details(
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Get backup details for a specific backup."""
    backup, agent_errors = await hass.data[DATA_MANAGER].async_get_backup(
        msg["backup_id"]
    )
    """Get backup details for a specific slug."""
    backup = await hass.data[DATA_MANAGER].async_get_backup(slug=msg["slug"])
    connection.send_result(
        msg["id"],
        {
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            },
            "backup": backup.as_frontend_json() if backup else None,
            "backup": backup,
        },
    )

@@ -89,39 +71,26 @@ async def handle_details(
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/delete",
        vol.Required("backup_id"): str,
        vol.Required("type"): "backup/remove",
        vol.Required("slug"): str,
    }
)
@websocket_api.async_response
async def handle_delete(
async def handle_remove(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Delete a backup."""
    agent_errors = await hass.data[DATA_MANAGER].async_delete_backup(msg["backup_id"])
    connection.send_result(
        msg["id"],
        {
            "agent_errors": {
                agent_id: str(err) for agent_id, err in agent_errors.items()
            }
        },
    )
    """Remove a backup."""
    await hass.data[DATA_MANAGER].async_remove_backup(slug=msg["slug"])
    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/restore",
        vol.Required("backup_id"): str,
        vol.Required("agent_id"): str,
        vol.Optional("password"): str,
        vol.Optional("restore_addons"): [str],
        vol.Optional("restore_database", default=True): bool,
        vol.Optional("restore_folders"): [vol.Coerce(Folder)],
        vol.Optional("restore_homeassistant", default=True): bool,
        vol.Required("slug"): str,
    }
)
@websocket_api.async_response
@@ -131,36 +100,12 @@ async def handle_restore(
    msg: dict[str, Any],
) -> None:
    """Restore a backup."""
    try:
        await hass.data[DATA_MANAGER].async_restore_backup(
            msg["backup_id"],
            agent_id=msg["agent_id"],
            password=msg.get("password"),
            restore_addons=msg.get("restore_addons"),
            restore_database=msg["restore_database"],
            restore_folders=msg.get("restore_folders"),
            restore_homeassistant=msg["restore_homeassistant"],
        )
    except IncorrectPasswordError:
        connection.send_error(msg["id"], "password_incorrect", "Incorrect password")
    else:
        connection.send_result(msg["id"])
    await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"])
    connection.send_result(msg["id"])

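Either side of this change is exercised the same way over the websocket. A hedged sketch of a test against the slug-based restore command, using the hass_ws_client fixture pattern from Home Assistant's test suite; the slug value is made up for illustration.

async def test_restore_backup(hass, hass_ws_client) -> None:
    """Round-trip the backup/restore command over the websocket."""
    client = await hass_ws_client(hass)
    await client.send_json({"id": 1, "type": "backup/restore", "slug": "abc123"})
    result = await client.receive_json()
    assert result["success"]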
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/generate",
        vol.Required("agent_ids"): [str],
        vol.Optional("include_addons"): [str],
        vol.Optional("include_all_addons", default=False): bool,
        vol.Optional("include_database", default=True): bool,
        vol.Optional("include_folders"): [vol.Coerce(Folder)],
        vol.Optional("include_homeassistant", default=True): bool,
        vol.Optional("name"): str,
        vol.Optional("password"): str,
    }
)
@websocket_api.websocket_command({vol.Required("type"): "backup/generate"})
@websocket_api.async_response
async def handle_create(
    hass: HomeAssistant,
@@ -168,46 +113,7 @@ async def handle_create(
    msg: dict[str, Any],
) -> None:
    """Generate a backup."""

    backup = await hass.data[DATA_MANAGER].async_initiate_backup(
        agent_ids=msg["agent_ids"],
        include_addons=msg.get("include_addons"),
        include_all_addons=msg["include_all_addons"],
        include_database=msg["include_database"],
        include_folders=msg.get("include_folders"),
        include_homeassistant=msg["include_homeassistant"],
        name=msg.get("name"),
        password=msg.get("password"),
    )
    connection.send_result(msg["id"], backup)

@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/generate_with_automatic_settings",
    }
)
@websocket_api.async_response
async def handle_create_with_automatic_settings(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Generate a backup with stored settings."""

    config_data = hass.data[DATA_MANAGER].config.data
    backup = await hass.data[DATA_MANAGER].async_initiate_backup(
        agent_ids=config_data.create_backup.agent_ids,
        include_addons=config_data.create_backup.include_addons,
        include_all_addons=config_data.create_backup.include_all_addons,
        include_database=config_data.create_backup.include_database,
        include_folders=config_data.create_backup.include_folders,
        include_homeassistant=True,  # always include HA
        name=config_data.create_backup.name,
        password=config_data.create_backup.password,
        with_automatic_settings=True,
    )
    backup = await hass.data[DATA_MANAGER].async_create_backup()
    connection.send_result(msg["id"], backup)


@@ -221,6 +127,7 @@ async def handle_backup_start(
) -> None:
    """Backup start notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = True
    LOGGER.debug("Backup start notification")

    try:
@@ -242,6 +149,7 @@ async def handle_backup_end(
) -> None:
    """Backup end notification."""
    manager = hass.data[DATA_MANAGER]
    manager.backing_up = False
    LOGGER.debug("Backup end notification")

    try:
@@ -251,101 +159,3 @@ async def handle_backup_end(
        return

    connection.send_result(msg["id"])


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"})
@websocket_api.async_response
async def backup_agents_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Return backup agents info."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "agents": [{"agent_id": agent_id} for agent_id in manager.backup_agents],
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/config/info"})
@websocket_api.async_response
async def handle_config_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Send the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    connection.send_result(
        msg["id"],
        {
            "config": manager.config.data.to_dict(),
        },
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("create_backup"): vol.Schema(
            {
                vol.Optional("agent_ids"): vol.All([str], vol.Unique()),
                vol.Optional("include_addons"): vol.Any(
                    vol.All([str], vol.Unique()), None
                ),
                vol.Optional("include_all_addons"): bool,
                vol.Optional("include_database"): bool,
                vol.Optional("include_folders"): vol.Any(
                    vol.All([vol.Coerce(Folder)], vol.Unique()), None
                ),
                vol.Optional("name"): vol.Any(str, None),
                vol.Optional("password"): vol.Any(str, None),
            },
        ),
        vol.Optional("retention"): vol.Schema(
            {
                vol.Optional("copies"): vol.Any(int, None),
                vol.Optional("days"): vol.Any(int, None),
            },
        ),
        vol.Optional("schedule"): vol.All(str, vol.Coerce(ScheduleState)),
    }
)
@websocket_api.async_response
async def handle_config_update(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Update the stored backup config."""
    manager = hass.data[DATA_MANAGER]
    changes = dict(msg)
    changes.pop("id")
    changes.pop("type")
    await manager.config.update(**changes)
    connection.send_result(msg["id"])
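The config/update handler above leans on voluptuous to validate and normalize the incoming message before the routing keys are stripped off. A runnable sketch of that flow with a trimmed stand-in schema; the retention and schedule shapes here are simplified, not the real ones.

import voluptuous as vol

# Trimmed stand-in for the backup/config/update schema above.
CONFIG_UPDATE_SCHEMA = vol.Schema(
    {
        vol.Required("type"): "backup/config/update",
        vol.Optional("retention"): vol.Schema(
            {
                vol.Optional("copies"): vol.Any(int, None),
                vol.Optional("days"): vol.Any(int, None),
            }
        ),
        vol.Optional("schedule"): vol.In(["daily", "never"]),
    }
)

msg = CONFIG_UPDATE_SCHEMA(
    {"type": "backup/config/update", "retention": {"copies": 3}, "schedule": "daily"}
)
changes = dict(msg)
changes.pop("type")  # mirror the handler: strip routing keys, keep the changes
print(changes)  # {'retention': {'copies': 3}, 'schedule': 'daily'}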
@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    manager = hass.data[DATA_MANAGER]
    on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
    connection.send_result(msg["id"])

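The subscription wiring above follows a common Home Assistant pattern: the manager keeps a list of callbacks and returns an unsubscribe closure, which the connection stores so it can clean up when the client disconnects. A self-contained sketch of that pattern; EventManager and its method names are illustrative stand-ins.

from collections.abc import Callable

class EventManager:
    """Toy manager: callbacks plus a replayed last_event, as in the diff."""

    def __init__(self) -> None:
        self._subscribers: list[Callable[[str], None]] = []
        self.last_event = "idle"

    def async_subscribe_events(
        self, on_event: Callable[[str], None]
    ) -> Callable[[], None]:
        self._subscribers.append(on_event)

        def unsubscribe() -> None:
            self._subscribers.remove(on_event)

        return unsubscribe

    def emit(self, event: str) -> None:
        self.last_event = event
        for on_event in list(self._subscribers):  # copy: safe vs. unsubscribes
            on_event(event)

manager = EventManager()
unsub = manager.async_subscribe_events(print)
manager.emit("backup_started")  # prints "backup_started"
unsub()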
@@ -40,6 +40,7 @@ class BAFAutoComfort(BAFEntity, ClimateEntity):
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_hvac_modes = [HVACMode.OFF, HVACMode.FAN_ONLY]
    _attr_translation_key = "auto_comfort"
    _enable_turn_on_off_backwards_compatibility = False

    @callback
    def _async_update_attrs(self) -> None:

@@ -46,7 +46,7 @@ class BAFFan(BAFEntity, FanEntity):
        | FanEntityFeature.TURN_OFF
        | FanEntityFeature.TURN_ON
    )

    _enable_turn_on_off_backwards_compatibility = False
    _attr_preset_modes = [PRESET_MODE_AUTO]
    _attr_speed_count = SPEED_COUNT
    _attr_name = None

@@ -8,12 +8,16 @@ from aiobafi6 import Device, OffOnAuto

from homeassistant.components.light import (
    ATTR_BRIGHTNESS,
    ATTR_COLOR_TEMP_KELVIN,
    ATTR_COLOR_TEMP,
    ColorMode,
    LightEntity,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.color import (
    color_temperature_kelvin_to_mired,
    color_temperature_mired_to_kelvin,
)

from . import BAFConfigEntry
from .entity import BAFEntity
@@ -73,17 +77,25 @@ class BAFStandaloneLight(BAFLight):
    def __init__(self, device: Device) -> None:
        """Init a standalone light."""
        super().__init__(device)
        self._attr_max_color_temp_kelvin = device.light_warmest_color_temperature
        self._attr_min_color_temp_kelvin = device.light_coolest_color_temperature
        self._attr_min_mireds = color_temperature_kelvin_to_mired(
            device.light_warmest_color_temperature
        )
        self._attr_max_mireds = color_temperature_kelvin_to_mired(
            device.light_coolest_color_temperature
        )

    @callback
    def _async_update_attrs(self) -> None:
        """Update attrs from device."""
        super()._async_update_attrs()
        self._attr_color_temp_kelvin = self._device.light_color_temperature
        self._attr_color_temp = color_temperature_kelvin_to_mired(
            self._device.light_color_temperature
        )

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn on the light."""
        if (color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
            self._device.light_color_temperature = color_temp
        if (color_temp := kwargs.get(ATTR_COLOR_TEMP)) is not None:
            self._device.light_color_temperature = color_temperature_mired_to_kelvin(
                color_temp
            )
        await super().async_turn_on(**kwargs)

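The light hunk swaps the kelvin-native attributes for the older mired-based ones, so the conversion helpers do the real work. Both directions are the same reciprocal: mireds are one million divided by kelvin. A quick worked sketch, hedged in that Home Assistant's own util may round differently than the plain round() used here.

def kelvin_to_mired(kelvin: float) -> int:
    """Mireds = 1,000,000 / kelvin."""
    return round(1_000_000 / kelvin)

def mired_to_kelvin(mired: float) -> int:
    """Kelvin = 1,000,000 / mireds: the same reciprocal."""
    return round(1_000_000 / mired)

assert kelvin_to_mired(2700) == 370   # warm white
assert kelvin_to_mired(5000) == 200   # cool white
assert mired_to_kelvin(370) == 2703   # the round trip loses a little precision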
@@ -65,6 +65,7 @@ class BalboaClimateEntity(BalboaEntity, ClimateEntity):
    )
    _attr_translation_key = DOMAIN
    _attr_name = None
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, client: SpaClient) -> None:
        """Initialize the climate entity."""

@@ -38,7 +38,7 @@ class BalboaPumpFanEntity(BalboaEntity, FanEntity):
        | FanEntityFeature.TURN_OFF
        | FanEntityFeature.TURN_ON
    )

    _enable_turn_on_off_backwards_compatibility = False
    _attr_translation_key = "pump"

    def __init__(self, control: SpaControl) -> None:

@@ -8,7 +8,6 @@ from aiohttp.client_exceptions import (
    ClientConnectorError,
    ClientOSError,
    ServerTimeoutError,
    WSMessageTypeError,
)
from mozart_api.exceptions import ApiException
from mozart_api.mozart_client import MozartClient
@@ -63,7 +62,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry)
        ServerTimeoutError,
        ApiException,
        TimeoutError,
        WSMessageTypeError,
    ) as error:
        await client.close_api_client()
        raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error

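The setup hunk follows the usual retry contract: connection-phase failures are converted into ConfigEntryNotReady so Home Assistant retries the entry later, after the half-open client is closed. A stripped-down, runnable sketch of that shape; DemoClient and its connect/close methods are stand-ins, not the mozart_api API.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

class DemoClient:
    """Stand-in for an API client such as MozartClient (illustrative only)."""

    async def connect(self) -> None:
        raise TimeoutError("device unreachable")

    async def close(self) -> None:
        pass

async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up the entry; raise ConfigEntryNotReady so HA retries later."""
    client = DemoClient()
    try:
        await client.connect()
    except (TimeoutError, OSError) as error:
        # Close first, then signal Home Assistant to retry setup.
        await client.close()
        raise ConfigEntryNotReady(f"Unable to connect to {entry.title}") from error
    return True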
@@ -210,20 +210,3 @@ BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"


CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"

# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
    "aux_a",
    "cd",
    "ph",
    "radio",
    "tp1",
    "tp2",
)
BEOLINK_JOIN_SOURCES = (
    *BEOLINK_JOIN_SOURCES_TO_UPPER,
    "beoradio",
    "deezer",
    "spotify",
    "tidal",
)
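Per the comment in the removed constants, the converter (NL/ML) sources are upper-cased before a Beolink join while the streaming sources pass through unchanged. A small sketch of how that split plays out; normalize_source is an illustrative helper, not part of the integration.

BEOLINK_JOIN_SOURCES_TO_UPPER = ("aux_a", "cd", "ph", "radio", "tp1", "tp2")

def normalize_source(source: str) -> str:
    """Upper-case converter NL/ML sources; leave streaming sources as-is."""
    if source in BEOLINK_JOIN_SOURCES_TO_UPPER:
        return source.upper()
    return source

assert normalize_source("radio") == "RADIO"   # converter source
assert normalize_source("deezer") == "deezer" # streaming source, unchanged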