Merge branch 'dev' into ingress_dropping_close

J. Nick Koston 2025-01-08 07:26:39 -10:00 committed by GitHub
commit 14b16e298a
4021 changed files with 182357 additions and 43380 deletions


@@ -6,6 +6,7 @@ core: &core
 - homeassistant/helpers/**
 - homeassistant/package_constraints.txt
 - homeassistant/util/**
+- mypy.ini
 - pyproject.toml
 - requirements.txt
 - setup.cfg
@@ -131,6 +132,7 @@ tests: &tests
 - tests/components/conftest.py
 - tests/components/diagnostics/**
 - tests/components/history/**
+- tests/components/light/common.py
 - tests/components/logbook/**
 - tests/components/recorder/**
 - tests/components/repairs/**


@@ -10,7 +10,7 @@ on:
 env:
 BUILD_TYPE: core
-DEFAULT_PYTHON: "3.12"
+DEFAULT_PYTHON: "3.13"
 PIP_TIMEOUT: 60
 UV_HTTP_TIMEOUT: 60
 UV_SYSTEM_PYTHON: "true"
@@ -69,7 +69,7 @@ jobs:
 run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
 - name: Upload translations
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: translations
 path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:
 - name: Download nightly wheels of frontend
 if: needs.init.outputs.channel == 'dev'
-uses: dawidd6/action-download-artifact@v6
+uses: dawidd6/action-download-artifact@v7
 with:
 github_token: ${{secrets.GITHUB_TOKEN}}
 repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:
 - name: Download nightly wheels of intents
 if: needs.init.outputs.channel == 'dev'
-uses: dawidd6/action-download-artifact@v6
+uses: dawidd6/action-download-artifact@v7
 with:
 github_token: ${{secrets.GITHUB_TOKEN}}
 repo: home-assistant/intents-package
@@ -509,7 +509,7 @@ jobs:
 password: ${{ secrets.GITHUB_TOKEN }}
 - name: Build Docker image
-uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
+uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
 with:
 context: . # So action will not pull the repository again
 file: ./script/hassfest/docker/Dockerfile
@@ -517,12 +517,12 @@ jobs:
 tags: ${{ env.HASSFEST_IMAGE_TAG }}
 - name: Run hassfest against core
-run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
+run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace
 - name: Push Docker image
 if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
 id: push
-uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
+uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0
 with:
 context: . # So action will not pull the repository again
 file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:
 - name: Generate artifact attestation
 if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
+uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
 with:
 subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
 subject-digest: ${{ steps.push.outputs.digest }}


@@ -40,9 +40,9 @@ env:
 CACHE_VERSION: 11
 UV_CACHE_VERSION: 1
 MYPY_CACHE_VERSION: 9
-HA_SHORT_VERSION: "2024.12"
+HA_SHORT_VERSION: "2025.2"
 DEFAULT_PYTHON: "3.12"
-ALL_PYTHON_VERSIONS: "['3.12']"
+ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
 # 10.3 is the oldest supported version
 # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
 # 10.6 is the current long-term-support
@@ -240,7 +240,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache@v4.1.2
+uses: actions/cache@v4.2.0
 with:
 path: venv
 key: >-
@@ -256,7 +256,7 @@ jobs:
 uv pip install "$(cat requirements_test.txt | grep pre-commit)"
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache@v4.1.2
+uses: actions/cache@v4.2.0
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 lookup-only: true
@@ -286,7 +286,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -366,7 +366,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true
@@ -482,16 +482,15 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache@v4.1.2
+uses: actions/cache@v4.2.0
 with:
 path: venv
-lookup-only: true
 key: >-
 ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
 needs.info.outputs.python_cache_key }}
 - name: Restore uv wheel cache
 if: steps.cache-venv.outputs.cache-hit != 'true'
-uses: actions/cache@v4.1.2
+uses: actions/cache@v4.2.0
 with:
 path: ${{ env.UV_CACHE_DIR }}
 key: >-
@@ -531,6 +530,26 @@ jobs:
 python -m script.gen_requirements_all ci
 uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
 uv pip install -e . --config-settings editable_mode=compat
+- name: Dump pip freeze
+run: |
+python -m venv venv
+. venv/bin/activate
+python --version
+uv pip freeze >> pip_freeze.txt
+- name: Upload pip_freeze artifact
+uses: actions/upload-artifact@v4.5.0
+with:
+name: pip-freeze-${{ matrix.python-version }}
+path: pip_freeze.txt
+overwrite: true
+- name: Remove pip_freeze
+run: rm pip_freeze.txt
+- name: Remove generated requirements_all
+if: steps.cache-venv.outputs.cache-hit != 'true'
+run: rm requirements_all_pytest.txt requirements_all_wheels_*.txt
+- name: Check dirty
+run: |
+./script/check_dirty
 hassfest:
 name: Check hassfest
@@ -559,7 +578,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -592,7 +611,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -630,7 +649,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -642,7 +661,7 @@ jobs:
 . venv/bin/activate
 python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
 - name: Upload licenses
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
 path: licenses-${{ matrix.python-version }}.json
@@ -673,7 +692,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -720,7 +739,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -772,7 +791,7 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -780,7 +799,7 @@ jobs:
 ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
 needs.info.outputs.python_cache_key }}
 - name: Restore mypy cache
-uses: actions/cache@v4.1.2
+uses: actions/cache@v4.2.0
 with:
 path: .mypy_cache
 key: >-
@@ -819,6 +838,12 @@ jobs:
 needs:
 - info
 - base
+- gen-requirements-all
+- hassfest
+- lint-other
+- lint-ruff
+- lint-ruff-format
+- mypy
 name: Split tests for full run
 steps:
 - name: Install additional OS dependencies
@@ -840,7 +865,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -852,7 +877,7 @@ jobs:
 . venv/bin/activate
 python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
 - name: Upload pytest_buckets
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: pytest_buckets
 path: pytest_buckets.txt
@@ -904,7 +929,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -954,14 +979,14 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
 path: pytest-*.txt
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
 path: coverage.xml
@@ -1025,7 +1050,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1081,7 +1106,7 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.mariadb }}
@@ -1089,7 +1114,7 @@ jobs:
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: coverage-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.mariadb }}
@@ -1154,7 +1179,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1211,7 +1236,7 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.postgresql }}
@@ -1219,7 +1244,7 @@ jobs:
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: coverage-${{ matrix.python-version }}-${{
 steps.pytest-partial.outputs.postgresql }}
@@ -1248,12 +1273,11 @@ jobs:
 pattern: coverage-*
 - name: Upload coverage to Codecov
 if: needs.info.outputs.test_full_suite == 'true'
-uses: codecov/codecov-action@v4.6.0
+uses: codecov/codecov-action@v5.1.2
 with:
 fail_ci_if_error: true
 flags: full-suite
 token: ${{ secrets.CODECOV_TOKEN }}
-version: v0.6.0
 pytest-partial:
 runs-on: ubuntu-24.04
@@ -1301,7 +1325,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-uses: actions/cache/restore@v4.1.2
+uses: actions/cache/restore@v4.2.0
 with:
 path: venv
 fail-on-cache-miss: true
@@ -1354,14 +1378,14 @@ jobs:
 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
 - name: Upload pytest output
 if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
 path: pytest-*.txt
 overwrite: true
 - name: Upload coverage artifact
 if: needs.info.outputs.skip_coverage != 'true'
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
 path: coverage.xml
@@ -1387,8 +1411,7 @@ jobs:
 pattern: coverage-*
 - name: Upload coverage to Codecov
 if: needs.info.outputs.test_full_suite == 'false'
-uses: codecov/codecov-action@v4.6.0
+uses: codecov/codecov-action@v5.1.2
 with:
 fail_ci_if_error: true
 token: ${{ secrets.CODECOV_TOKEN }}
-version: v0.6.0


@@ -24,11 +24,11 @@ jobs:
 uses: actions/checkout@v4.2.2
 - name: Initialize CodeQL
-uses: github/codeql-action/init@v3.27.0
+uses: github/codeql-action/init@v3.28.0
 with:
 languages: python
 - name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v3.27.0
+uses: github/codeql-action/analyze@v3.28.0
 with:
 category: "/language:python"


@@ -76,18 +76,37 @@ jobs:
 # Use C-Extension for SQLAlchemy
 echo "REQUIRE_SQLALCHEMY_CEXT=1"
+# Add additional pip wheel build constraints
+echo "PIP_CONSTRAINT=build_constraints.txt"
 ) > .env_file
+- name: Write pip wheel build constraints
+run: |
+(
+# ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
+# this caused the numpy builds to fail
+# https://github.com/scikit-build/ninja-python-distributions/issues/274
+echo "ninja==1.11.1.1"
+) > build_constraints.txt
 - name: Upload env_file
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: env_file
 path: ./.env_file
 include-hidden-files: true
 overwrite: true
+- name: Upload build_constraints
+uses: actions/upload-artifact@v4.5.0
+with:
+name: build_constraints
+path: ./build_constraints.txt
+overwrite: true
 - name: Upload requirements_diff
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: requirements_diff
 path: ./requirements_diff.txt
@@ -99,7 +118,7 @@ jobs:
 python -m script.gen_requirements_all ci
 - name: Upload requirements_all_wheels
-uses: actions/upload-artifact@v4.4.3
+uses: actions/upload-artifact@v4.5.0
 with:
 name: requirements_all_wheels
 path: ./requirements_all_wheels_*.txt
@@ -112,7 +131,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-abi: ["cp312"]
+abi: ["cp312", "cp313"]
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
@@ -123,6 +142,11 @@ jobs:
 with:
 name: env_file
+- name: Download build_constraints
+uses: actions/download-artifact@v4.1.8
+with:
+name: build_constraints
 - name: Download requirements_diff
 uses: actions/download-artifact@v4.1.8
 with:
@@ -135,15 +159,15 @@ jobs:
 sed -i "/uv/d" requirements_diff.txt
 - name: Build wheels
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
+apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
-skip-binary: aiohttp;multidict;yarl
+skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements.txt"
@@ -156,7 +180,7 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-abi: ["cp312"]
+abi: ["cp312", "cp313"]
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
@@ -167,6 +191,11 @@ jobs:
 with:
 name: env_file
+- name: Download build_constraints
+uses: actions/download-artifact@v4.1.8
+with:
+name: build_constraints
 - name: Download requirements_diff
 uses: actions/download-artifact@v4.1.8
 with:
@@ -197,69 +226,44 @@ jobs:
 split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-- name: Create requirements for cython<3
-run: |
-# Some dependencies still require 'cython<3'
-# and don't yet use isolated build environments.
-# Build these first.
-# pydantic: https://github.com/pydantic/pydantic/issues/7689
-touch requirements_old-cython.txt
-cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
-- name: Build wheels (old cython)
-uses: home-assistant/wheels@2024.07.1
-with:
-abi: ${{ matrix.abi }}
-tag: musllinux_1_2
-arch: ${{ matrix.arch }}
-wheels-key: ${{ secrets.WHEELS_KEY }}
-env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
-constraints: "homeassistant/package_constraints.txt"
-requirements-diff: "requirements_diff.txt"
-requirements: "requirements_old-cython.txt"
-pip: "'cython<3'"
 - name: Build wheels (part 1)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtaa"
 - name: Build wheels (part 2)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtab"
 - name: Build wheels (part 3)
-uses: home-assistant/wheels@2024.07.1
+uses: home-assistant/wheels@2024.11.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
 arch: ${{ matrix.arch }}
 wheels-key: ${{ secrets.WHEELS_KEY }}
 env-file: true
-apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
+apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
+skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
 constraints: "homeassistant/package_constraints.txt"
 requirements-diff: "requirements_diff.txt"
 requirements: "requirements_all.txtac"


@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-rev: v0.7.2
+rev: v0.8.6
 hooks:
 - id: ruff
 args:
@@ -12,13 +12,13 @@ repos:
 hooks:
 - id: codespell
 args:
-- --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
+- --ignore-words-list=aiport,astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn
 - --skip="./.*,*.csv,*.json,*.ambr"
 - --quiet-level=2
 exclude_types: [csv, json, html]
 exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
 - repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v4.4.0
+rev: v5.0.0
 hooks:
 - id: check-executables-have-shebangs
 stages: [manual]
@@ -83,14 +83,14 @@ repos:
 pass_filenames: false
 language: script
 types: [text]
-files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
+files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$
 - id: hassfest-metadata
 name: hassfest-metadata
 entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker
 pass_filenames: false
 language: script
 types: [text]
-files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
+files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
 - id: hassfest-mypy-config
 name: hassfest-mypy-config
 entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config


@@ -41,6 +41,7 @@ homeassistant.util.unit_system
 # --- Add components below this line ---
 homeassistant.components
 homeassistant.components.abode.*
+homeassistant.components.acaia.*
 homeassistant.components.accuweather.*
 homeassistant.components.acer_projector.*
 homeassistant.components.acmeda.*
@@ -136,6 +137,7 @@ homeassistant.components.co2signal.*
 homeassistant.components.command_line.*
 homeassistant.components.config.*
 homeassistant.components.configurator.*
+homeassistant.components.cookidoo.*
 homeassistant.components.counter.*
 homeassistant.components.cover.*
 homeassistant.components.cpuspeed.*
@@ -168,6 +170,7 @@ homeassistant.components.easyenergy.*
 homeassistant.components.ecovacs.*
 homeassistant.components.ecowitt.*
 homeassistant.components.efergy.*
+homeassistant.components.eheimdigital.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
 homeassistant.components.elevenlabs.*
@@ -268,6 +271,7 @@ homeassistant.components.ios.*
 homeassistant.components.iotty.*
 homeassistant.components.ipp.*
 homeassistant.components.iqvia.*
+homeassistant.components.iron_os.*
 homeassistant.components.islamic_prayer_times.*
 homeassistant.components.isy994.*
 homeassistant.components.jellyfin.*
@@ -307,6 +311,8 @@ homeassistant.components.manual.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
+homeassistant.components.mcp_server.*
+homeassistant.components.mealie.*
 homeassistant.components.media_extractor.*
 homeassistant.components.media_player.*
 homeassistant.components.media_source.*
@@ -357,13 +363,17 @@ homeassistant.components.openuv.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
+homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
+homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
+homeassistant.components.peblar.*
 homeassistant.components.peco.*
 homeassistant.components.persistent_notification.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*
 homeassistant.components.prometheus.*
@@ -373,6 +383,7 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
+homeassistant.components.python_script.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
@@ -385,6 +396,7 @@ homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
 homeassistant.components.remote.*
 homeassistant.components.renault.*
+homeassistant.components.reolink.*
 homeassistant.components.repairs.*
 homeassistant.components.rest.*
 homeassistant.components.rest_command.*
@@ -399,11 +411,13 @@ homeassistant.components.romy.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
 homeassistant.components.rtsp_to_webrtc.*
+homeassistant.components.russound_rio.*
 homeassistant.components.ruuvi_gateway.*
 homeassistant.components.ruuvitag_ble.*
 homeassistant.components.samsungtv.*
 homeassistant.components.scene.*
 homeassistant.components.schedule.*
+homeassistant.components.schlage.*
 homeassistant.components.scrape.*
 homeassistant.components.script.*
 homeassistant.components.search.*
@@ -436,7 +450,7 @@ homeassistant.components.ssdp.*
 homeassistant.components.starlink.*
 homeassistant.components.statistics.*
 homeassistant.components.steamist.*
-homeassistant.components.stookalert.*
+homeassistant.components.stookwijzer.*
 homeassistant.components.stream.*
 homeassistant.components.streamlabswater.*
 homeassistant.components.stt.*

.vscode/tasks.json

@@ -16,7 +16,7 @@
 {
 "label": "Pytest",
 "type": "shell",
-"command": "python3 -m pytest --timeout=10 tests",
+"command": "${command:python.interpreterPath} -m pytest --timeout=10 tests",
 "dependsOn": ["Install all Test Requirements"],
 "group": {
 "kind": "test",
@@ -31,7 +31,7 @@
 {
 "label": "Pytest (changed tests only)",
 "type": "shell",
-"command": "python3 -m pytest --timeout=10 --picked",
+"command": "${command:python.interpreterPath} -m pytest --timeout=10 --picked",
 "group": {
 "kind": "test",
 "isDefault": true
@@ -56,6 +56,20 @@
 },
 "problemMatcher": []
 },
+{
+"label": "Pre-commit",
+"type": "shell",
+"command": "pre-commit run --show-diff-on-failure",
+"group": {
+"kind": "test",
+"isDefault": true
+},
+"presentation": {
+"reveal": "always",
+"panel": "new"
+},
+"problemMatcher": []
+},
 {
 "label": "Pylint",
 "type": "shell",
@@ -75,7 +89,23 @@
 "label": "Code Coverage",
 "detail": "Generate code coverage report for a given integration.",
 "type": "shell",
-"command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+"command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
+"dependsOn": ["Compile English translations"],
+"group": {
+"kind": "test",
+"isDefault": true
+},
+"presentation": {
+"reveal": "always",
+"panel": "new"
+},
+"problemMatcher": []
+},
+{
+"label": "Update syrupy snapshots",
+"detail": "Update syrupy snapshots for a given integration.",
+"type": "shell",
+"command": "${command:python.interpreterPath} -m pytest ./tests/components/${input:integrationName} --snapshot-update",
 "dependsOn": ["Compile English translations"],
 "group": {
 "kind": "test",
@@ -133,7 +163,7 @@
 "label": "Compile English translations",
 "detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.",
 "type": "shell",
-"command": "python3 -m script.translations develop --all",
+"command": "${command:python.interpreterPath} -m script.translations develop --all",
 "group": {
 "kind": "build",
 "isDefault": true
@@ -143,7 +173,7 @@
 "label": "Run scaffold",
 "detail": "Add new functionality to a integration using a scaffold.",
 "type": "shell",
-"command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
+"command": "${command:python.interpreterPath} -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
 "group": {
 "kind": "build",
 "isDefault": true
@@ -153,7 +183,7 @@
 "label": "Create new integration",
 "detail": "Use the scaffold to create a new integration.",
 "type": "shell",
-"command": "python3 -m script.scaffold integration",
+"command": "${command:python.interpreterPath} -m script.scaffold integration",
 "group": {
 "kind": "build",
 "isDefault": true

@@ -40,6 +40,8 @@ build.json @home-assistant/supervisor
 # Integrations
 /homeassistant/components/abode/ @shred86
 /tests/components/abode/ @shred86
+/homeassistant/components/acaia/ @zweckj
+/tests/components/acaia/ @zweckj
 /homeassistant/components/accuweather/ @bieniu
 /tests/components/accuweather/ @bieniu
 /homeassistant/components/acmeda/ @atmurray
@@ -282,6 +284,8 @@ build.json @home-assistant/supervisor
 /tests/components/control4/ @lawtancool
 /homeassistant/components/conversation/ @home-assistant/core @synesthesiam
 /tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/cookidoo/ @miaucl
+/tests/components/cookidoo/ @miaucl
 /homeassistant/components/coolmaster/ @OnFreund
 /tests/components/coolmaster/ @OnFreund
 /homeassistant/components/counter/ @fabaff
@@ -383,6 +387,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/efergy/ @tkdrob
 /tests/components/efergy/ @tkdrob
 /homeassistant/components/egardia/ @jeroenterheerdt
+/homeassistant/components/eheimdigital/ @autinerd
+/tests/components/eheimdigital/ @autinerd
 /homeassistant/components/electrasmart/ @jafar-atili
 /tests/components/electrasmart/ @jafar-atili
 /homeassistant/components/electric_kiwi/ @mikey0000
@@ -572,8 +578,8 @@ build.json @home-assistant/supervisor
 /tests/components/google_tasks/ @allenporter
 /homeassistant/components/google_travel_time/ @eifinger
 /tests/components/google_travel_time/ @eifinger
-/homeassistant/components/govee_ble/ @bdraco @PierreAronnax
+/homeassistant/components/govee_ble/ @bdraco
-/tests/components/govee_ble/ @bdraco @PierreAronnax
+/tests/components/govee_ble/ @bdraco
 /homeassistant/components/govee_light_local/ @Galorhallen
 /tests/components/govee_light_local/ @Galorhallen
 /homeassistant/components/gpsd/ @fabaff @jrieger
@@ -586,8 +592,8 @@ build.json @home-assistant/supervisor
 /tests/components/group/ @home-assistant/core
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
-/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
+/homeassistant/components/habitica/ @tr4nt0r
-/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r
+/tests/components/habitica/ @tr4nt0r
 /homeassistant/components/hardkernel/ @home-assistant/core
 /tests/components/hardkernel/ @home-assistant/core
 /homeassistant/components/hardware/ @home-assistant/core
@@ -631,6 +637,8 @@ build.json @home-assistant/supervisor
 /tests/components/homeassistant_sky_connect/ @home-assistant/core
 /homeassistant/components/homeassistant_yellow/ @home-assistant/core
 /tests/components/homeassistant_yellow/ @home-assistant/core
+/homeassistant/components/homee/ @Taraman17
+/tests/components/homee/ @Taraman17
 /homeassistant/components/homekit/ @bdraco
 /tests/components/homekit/ @bdraco
 /homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -680,6 +688,8 @@ build.json @home-assistant/supervisor
 /tests/components/icloud/ @Quentame @nzapponi
 /homeassistant/components/idasen_desk/ @abmantis
 /tests/components/idasen_desk/ @abmantis
+/homeassistant/components/igloohome/ @keithle888
+/tests/components/igloohome/ @keithle888
 /homeassistant/components/ign_sismologia/ @exxamalte
 /tests/components/ign_sismologia/ @exxamalte
 /homeassistant/components/image/ @home-assistant/core
@@ -725,8 +735,8 @@ build.json @home-assistant/supervisor
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
 /tests/components/iotawatt/ @gtdiehl @jyavenard
-/homeassistant/components/iotty/ @pburgio @shapournemati-iotty
+/homeassistant/components/iotty/ @shapournemati-iotty
-/tests/components/iotty/ @pburgio @shapournemati-iotty
+/tests/components/iotty/ @shapournemati-iotty
 /homeassistant/components/iperf3/ @rohankapoorcom
 /homeassistant/components/ipma/ @dgomes
 /tests/components/ipma/ @dgomes
@@ -751,6 +761,8 @@ build.json @home-assistant/supervisor
 /tests/components/ista_ecotrend/ @tr4nt0r
 /homeassistant/components/isy994/ @bdraco @shbatm
 /tests/components/isy994/ @bdraco @shbatm
+/homeassistant/components/ituran/ @shmuelzon
+/tests/components/ituran/ @shmuelzon
 /homeassistant/components/izone/ @Swamp-Ig
 /tests/components/izone/ @Swamp-Ig
 /homeassistant/components/jellyfin/ @j-stienstra @ctalkington
@@ -879,6 +891,8 @@ build.json @home-assistant/supervisor
 /tests/components/matrix/ @PaarthShah
 /homeassistant/components/matter/ @home-assistant/matter
 /tests/components/matter/ @home-assistant/matter
+/homeassistant/components/mcp_server/ @allenporter
+/tests/components/mcp_server/ @allenporter
 /homeassistant/components/mealie/ @joostlek @andrew-codechimp
 /tests/components/mealie/ @joostlek @andrew-codechimp
 /homeassistant/components/meater/ @Sotolotl @emontnemery
@@ -972,8 +986,6 @@ build.json @home-assistant/supervisor
 /tests/components/nanoleaf/ @milanmeu @joostlek
 /homeassistant/components/nasweb/ @nasWebio
 /tests/components/nasweb/ @nasWebio
-/homeassistant/components/neato/ @Santobert
-/tests/components/neato/ @Santobert
 /homeassistant/components/nederlandse_spoorwegen/ @YarmoM
 /homeassistant/components/ness_alarm/ @nickw444
 /tests/components/ness_alarm/ @nickw444
@@ -1004,6 +1016,8 @@ build.json @home-assistant/supervisor
 /tests/components/nice_go/ @IceBotYT
 /homeassistant/components/nightscout/ @marciogranzotto
 /tests/components/nightscout/ @marciogranzotto
+/homeassistant/components/niko_home_control/ @VandeurenGlenn
+/tests/components/niko_home_control/ @VandeurenGlenn
 /homeassistant/components/nilu/ @hfurubotten
 /homeassistant/components/nina/ @DeerMaximum
 /tests/components/nina/ @DeerMaximum
@@ -1045,6 +1059,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/octoprint/ @rfleming71
 /tests/components/octoprint/ @rfleming71
 /homeassistant/components/ohmconnect/ @robbiet480
+/homeassistant/components/ohme/ @dan-r
+/tests/components/ohme/ @dan-r
 /homeassistant/components/ollama/ @synesthesiam
 /tests/components/ollama/ @synesthesiam
 /homeassistant/components/ombi/ @larssont
@@ -1056,8 +1072,8 @@ build.json @home-assistant/supervisor
 /tests/components/ondilo_ico/ @JeromeHXP
 /homeassistant/components/onewire/ @garbled1 @epenet
 /tests/components/onewire/ @garbled1 @epenet
-/homeassistant/components/onkyo/ @arturpragacz
+/homeassistant/components/onkyo/ @arturpragacz @eclair4151
-/tests/components/onkyo/ @arturpragacz
+/tests/components/onkyo/ @arturpragacz @eclair4151
 /homeassistant/components/onvif/ @hunterjm
 /tests/components/onvif/ @hunterjm
 /homeassistant/components/open_meteo/ @frenck
@@ -1093,8 +1109,10 @@ build.json @home-assistant/supervisor
 /tests/components/otbr/ @home-assistant/core
 /homeassistant/components/ourgroceries/ @OnFreund
 /tests/components/ourgroceries/ @OnFreund
-/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
+/homeassistant/components/overkiz/ @imicknl
-/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
+/tests/components/overkiz/ @imicknl
+/homeassistant/components/overseerr/ @joostlek
+/tests/components/overseerr/ @joostlek
 /homeassistant/components/ovo_energy/ @timmo001
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1103,6 +1121,8 @@ build.json @home-assistant/supervisor
 /tests/components/palazzetti/ @dotvav
 /homeassistant/components/panel_custom/ @home-assistant/frontend
 /tests/components/panel_custom/ @home-assistant/frontend
+/homeassistant/components/peblar/ @frenck
+/tests/components/peblar/ @frenck
 /homeassistant/components/peco/ @IceBotYT
 /tests/components/peco/ @IceBotYT
 /homeassistant/components/pegel_online/ @mib1185
@@ -1123,14 +1143,16 @@ build.json @home-assistant/supervisor
 /tests/components/plaato/ @JohNan
 /homeassistant/components/plex/ @jjlawren
 /tests/components/plex/ @jjlawren
-/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
+/homeassistant/components/plugwise/ @CoMPaTech @bouwew
-/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
+/tests/components/plugwise/ @CoMPaTech @bouwew
 /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
 /tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
 /tests/components/point/ @fredrike
 /homeassistant/components/poolsense/ @haemishkyd
 /tests/components/poolsense/ @haemishkyd
+/homeassistant/components/powerfox/ @klaasnicolaas
+/tests/components/powerfox/ @klaasnicolaas
 /homeassistant/components/powerwall/ @bdraco @jrester @daniel-simpson
 /tests/components/powerwall/ @bdraco @jrester @daniel-simpson
 /homeassistant/components/private_ble_device/ @Jc2k
@@ -1344,6 +1366,8 @@ build.json @home-assistant/supervisor
 /tests/components/siren/ @home-assistant/core @raman325
 /homeassistant/components/sisyphus/ @jkeljo
 /homeassistant/components/sky_hub/ @rogerselwyn
+/homeassistant/components/sky_remote/ @dunnmj @saty9
+/tests/components/sky_remote/ @dunnmj @saty9
 /homeassistant/components/skybell/ @tkdrob
 /tests/components/skybell/ @tkdrob
 /homeassistant/components/slack/ @tkdrob @fletcherau
@@ -1351,6 +1375,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/sleepiq/ @mfugate1 @kbickar
 /tests/components/sleepiq/ @mfugate1 @kbickar
/homeassistant/components/slide/ @ualex73 /homeassistant/components/slide/ @ualex73
/homeassistant/components/slide_local/ @dontinelli
/tests/components/slide_local/ @dontinelli
/homeassistant/components/slimproto/ @marcelveldt /homeassistant/components/slimproto/ @marcelveldt
/tests/components/slimproto/ @marcelveldt /tests/components/slimproto/ @marcelveldt
/homeassistant/components/sma/ @kellerza @rklomp /homeassistant/components/sma/ @kellerza @rklomp
@ -1409,15 +1435,13 @@ build.json @home-assistant/supervisor
/tests/components/starline/ @anonym-tsk /tests/components/starline/ @anonym-tsk
/homeassistant/components/starlink/ @boswelja /homeassistant/components/starlink/ @boswelja
/tests/components/starlink/ @boswelja /tests/components/starlink/ @boswelja
/homeassistant/components/statistics/ @ThomDietrich /homeassistant/components/statistics/ @ThomDietrich @gjohansson-ST
/tests/components/statistics/ @ThomDietrich /tests/components/statistics/ @ThomDietrich @gjohansson-ST
/homeassistant/components/steam_online/ @tkdrob /homeassistant/components/steam_online/ @tkdrob
/tests/components/steam_online/ @tkdrob /tests/components/steam_online/ @tkdrob
/homeassistant/components/steamist/ @bdraco /homeassistant/components/steamist/ @bdraco
/tests/components/steamist/ @bdraco /tests/components/steamist/ @bdraco
/homeassistant/components/stiebel_eltron/ @fucm /homeassistant/components/stiebel_eltron/ @fucm
/homeassistant/components/stookalert/ @fwestenberg @frenck
/tests/components/stookalert/ @fwestenberg @frenck
/homeassistant/components/stookwijzer/ @fwestenberg /homeassistant/components/stookwijzer/ @fwestenberg
/tests/components/stookwijzer/ @fwestenberg /tests/components/stookwijzer/ @fwestenberg
/homeassistant/components/stream/ @hunterjm @uvjustin @allenporter /homeassistant/components/stream/ @hunterjm @uvjustin @allenporter
@ -1462,8 +1486,8 @@ build.json @home-assistant/supervisor
/tests/components/system_bridge/ @timmo001 /tests/components/system_bridge/ @timmo001
/homeassistant/components/systemmonitor/ @gjohansson-ST /homeassistant/components/systemmonitor/ @gjohansson-ST
/tests/components/systemmonitor/ @gjohansson-ST /tests/components/systemmonitor/ @gjohansson-ST
/homeassistant/components/tado/ @chiefdragon @erwindouna /homeassistant/components/tado/ @erwindouna
/tests/components/tado/ @chiefdragon @erwindouna /tests/components/tado/ @erwindouna
/homeassistant/components/tag/ @balloob @dmulcahey /homeassistant/components/tag/ @balloob @dmulcahey
/tests/components/tag/ @balloob @dmulcahey /tests/components/tag/ @balloob @dmulcahey
/homeassistant/components/tailscale/ @frenck /homeassistant/components/tailscale/ @frenck
@ -1485,8 +1509,8 @@ build.json @home-assistant/supervisor
/tests/components/tedee/ @patrickhilker @zweckj /tests/components/tedee/ @patrickhilker @zweckj
/homeassistant/components/tellduslive/ @fredrike /homeassistant/components/tellduslive/ @fredrike
/tests/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core /homeassistant/components/template/ @PhracturedBlue @home-assistant/core
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core /tests/components/template/ @PhracturedBlue @home-assistant/core
/homeassistant/components/tesla_fleet/ @Bre77 /homeassistant/components/tesla_fleet/ @Bre77
/tests/components/tesla_fleet/ @Bre77 /tests/components/tesla_fleet/ @Bre77
/homeassistant/components/tesla_wall_connector/ @einarhauks /homeassistant/components/tesla_wall_connector/ @einarhauks
@ -1557,8 +1581,8 @@ build.json @home-assistant/supervisor
/tests/components/triggercmd/ @rvmey /tests/components/triggercmd/ @rvmey
/homeassistant/components/tts/ @home-assistant/core /homeassistant/components/tts/ @home-assistant/core
/tests/components/tts/ @home-assistant/core /tests/components/tts/ @home-assistant/core
/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck /homeassistant/components/tuya/ @Tuya @zlinoliver
/tests/components/tuya/ @Tuya @zlinoliver @frenck /tests/components/tuya/ @Tuya @zlinoliver
/homeassistant/components/twentemilieu/ @frenck /homeassistant/components/twentemilieu/ @frenck
/tests/components/twentemilieu/ @frenck /tests/components/twentemilieu/ @frenck
/homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen /homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
@ -1571,6 +1595,8 @@ build.json @home-assistant/supervisor
/tests/components/unifi/ @Kane610 /tests/components/unifi/ @Kane610
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL /homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifiled/ @florisvdk /homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @RaHehl
/tests/components/unifiprotect/ @RaHehl
/homeassistant/components/upb/ @gwww /homeassistant/components/upb/ @gwww
/tests/components/upb/ @gwww /tests/components/upb/ @gwww
/homeassistant/components/upc_connect/ @pvizeli @fabaff /homeassistant/components/upc_connect/ @pvizeli @fabaff
@ -1638,6 +1664,8 @@ build.json @home-assistant/supervisor
/tests/components/waqi/ @joostlek /tests/components/waqi/ @joostlek
/homeassistant/components/water_heater/ @home-assistant/core /homeassistant/components/water_heater/ @home-assistant/core
/tests/components/water_heater/ @home-assistant/core /tests/components/water_heater/ @home-assistant/core
/homeassistant/components/watergate/ @adam-the-hero
/tests/components/watergate/ @adam-the-hero
/homeassistant/components/watson_tts/ @rutkai /homeassistant/components/watson_tts/ @rutkai
/homeassistant/components/watttime/ @bachya /homeassistant/components/watttime/ @bachya
/tests/components/watttime/ @bachya /tests/components/watttime/ @bachya
@ -1722,6 +1750,7 @@ build.json @home-assistant/supervisor
/tests/components/youless/ @gjong /tests/components/youless/ @gjong
/homeassistant/components/youtube/ @joostlek /homeassistant/components/youtube/ @joostlek
/tests/components/youtube/ @joostlek /tests/components/youtube/ @joostlek
/homeassistant/components/zabbix/ @kruton
/homeassistant/components/zamg/ @killer0071234 /homeassistant/components/zamg/ @killer0071234
/tests/components/zamg/ @killer0071234 /tests/components/zamg/ @killer0071234
/homeassistant/components/zengge/ @emontnemery /homeassistant/components/zengge/ @emontnemery

View File

@ -13,7 +13,7 @@ ENV \
ARG QEMU_CPU ARG QEMU_CPU
# Install uv # Install uv
RUN pip3 install uv==0.5.0 RUN pip3 install uv==0.5.8
WORKDIR /usr/src WORKDIR /usr/src
@ -55,7 +55,7 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \ "armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \ *) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \ esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.7/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \ && chmod +x /bin/go2rtc \
# Verify go2rtc can be executed # Verify go2rtc can be executed
&& go2rtc --version && go2rtc --version

View File

@ -1,4 +1,4 @@
FROM mcr.microsoft.com/devcontainers/python:1-3.12 FROM mcr.microsoft.com/devcontainers/python:1-3.13
SHELL ["/bin/bash", "-o", "pipefail", "-c"] SHELL ["/bin/bash", "-o", "pipefail", "-c"]
@ -35,6 +35,9 @@ RUN \
&& apt-get clean \ && apt-get clean \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc
# Install uv # Install uv
RUN pip3 install uv RUN pip3 install uv

View File

@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant image: ghcr.io/home-assistant/{arch}-homeassistant
build_from: build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1 aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.12.0
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1 armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.12.0
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1 armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.12.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1 amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.12.0
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1 i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.12.0
codenotary: codenotary:
signer: notary@home-assistant.io signer: notary@home-assistant.io
base_image: notary@home-assistant.io base_image: notary@home-assistant.io

View File

@ -115,7 +115,7 @@ class AuthManagerFlowManager(
*, *,
context: AuthFlowContext | None = None, context: AuthFlowContext | None = None,
data: dict[str, Any] | None = None, data: dict[str, Any] | None = None,
) -> LoginFlow: ) -> LoginFlow[Any]:
"""Create a login flow.""" """Create a login flow."""
auth_provider = self.auth_manager.get_auth_provider(*handler_key) auth_provider = self.auth_manager.get_auth_provider(*handler_key)
if not auth_provider: if not auth_provider:

View File

@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads
JWT_TOKEN_CACHE_SIZE = 16 JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192 MAX_TOKEN_SIZE = 8192
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss") _VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti")
_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | { _VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
"require": [] "require": []

View File

@ -4,8 +4,9 @@ from __future__ import annotations
import logging import logging
import types import types
from typing import Any from typing import Any, Generic
from typing_extensions import TypeVar
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
@ -34,6 +35,12 @@ DATA_REQS: HassKey[set[str]] = HassKey("mfa_auth_module_reqs_processed")
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
_MultiFactorAuthModuleT = TypeVar(
"_MultiFactorAuthModuleT",
bound="MultiFactorAuthModule",
default="MultiFactorAuthModule",
)
class MultiFactorAuthModule: class MultiFactorAuthModule:
"""Multi-factor Auth Module of validation function.""" """Multi-factor Auth Module of validation function."""
@ -71,7 +78,7 @@ class MultiFactorAuthModule:
"""Return a voluptuous schema to define mfa auth module's input.""" """Return a voluptuous schema to define mfa auth module's input."""
raise NotImplementedError raise NotImplementedError
async def async_setup_flow(self, user_id: str) -> SetupFlow: async def async_setup_flow(self, user_id: str) -> SetupFlow[Any]:
"""Return a data entry flow handler for setup module. """Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow Mfa module should extend SetupFlow
@ -95,11 +102,14 @@ class MultiFactorAuthModule:
raise NotImplementedError raise NotImplementedError
class SetupFlow(data_entry_flow.FlowHandler): class SetupFlow(data_entry_flow.FlowHandler, Generic[_MultiFactorAuthModuleT]):
"""Handler for the setup flow.""" """Handler for the setup flow."""
def __init__( def __init__(
self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str self,
auth_module: _MultiFactorAuthModuleT,
setup_schema: vol.Schema,
user_id: str,
) -> None: ) -> None:
"""Initialize the setup flow.""" """Initialize the setup flow."""
self._auth_module = auth_module self._auth_module = auth_module
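Because the TypeVar above is declared with a default, un-parameterized subclasses keep their previous meaning, while concrete modules can parameterize the flow instead of re-annotating self._auth_module (as the notify and TOTP modules do further down). A minimal sketch, with a hypothetical subclass name:
class PlainSetupFlow(SetupFlow):  # equivalent to SetupFlow[MultiFactorAuthModule]
    ...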

View File

@ -162,7 +162,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
return sorted(unordered_services) return sorted(unordered_services)
async def async_setup_flow(self, user_id: str) -> SetupFlow: async def async_setup_flow(self, user_id: str) -> NotifySetupFlow:
"""Return a data entry flow handler for setup module. """Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow Mfa module should extend SetupFlow
@ -268,7 +268,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
await self.hass.services.async_call("notify", notify_service, data) await self.hass.services.async_call("notify", notify_service, data)
class NotifySetupFlow(SetupFlow): class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
"""Handler for the setup flow.""" """Handler for the setup flow."""
def __init__( def __init__(
@ -280,8 +280,6 @@ class NotifySetupFlow(SetupFlow):
) -> None: ) -> None:
"""Initialize the setup flow.""" """Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user_id) super().__init__(auth_module, setup_schema, user_id)
# to fix typing complaint
self._auth_module: NotifyAuthModule = auth_module
self._available_notify_services = available_notify_services self._available_notify_services = available_notify_services
self._secret: str | None = None self._secret: str | None = None
self._count: int | None = None self._count: int | None = None

View File

@ -114,7 +114,7 @@ class TotpAuthModule(MultiFactorAuthModule):
self._users[user_id] = ota_secret # type: ignore[index] self._users[user_id] = ota_secret # type: ignore[index]
return ota_secret return ota_secret
async def async_setup_flow(self, user_id: str) -> SetupFlow: async def async_setup_flow(self, user_id: str) -> TotpSetupFlow:
"""Return a data entry flow handler for setup module. """Return a data entry flow handler for setup module.
Mfa module should extend SetupFlow Mfa module should extend SetupFlow
@ -174,20 +174,19 @@ class TotpAuthModule(MultiFactorAuthModule):
return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1)) return bool(pyotp.TOTP(ota_secret).verify(code, valid_window=1))
class TotpSetupFlow(SetupFlow): class TotpSetupFlow(SetupFlow[TotpAuthModule]):
"""Handler for the setup flow.""" """Handler for the setup flow."""
_ota_secret: str
_url: str
_image: str
def __init__( def __init__(
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
) -> None: ) -> None:
"""Initialize the setup flow.""" """Initialize the setup flow."""
super().__init__(auth_module, setup_schema, user.id) super().__init__(auth_module, setup_schema, user.id)
# to fix typing complaint
self._auth_module: TotpAuthModule = auth_module
self._user = user self._user = user
self._ota_secret: str = ""
self._url: str | None = None
self._image: str | None = None
async def async_step_init( async def async_step_init(
self, user_input: dict[str, str] | None = None self, user_input: dict[str, str] | None = None
@ -214,12 +213,11 @@ class TotpSetupFlow(SetupFlow):
errors["base"] = "invalid_code" errors["base"] = "invalid_code"
else: else:
hass = self._auth_module.hass
( (
self._ota_secret, self._ota_secret,
self._url, self._url,
self._image, self._image,
) = await hass.async_add_executor_job( ) = await self._auth_module.hass.async_add_executor_job(
_generate_secret_and_qr_code, _generate_secret_and_qr_code,
str(self._user.name), str(self._user.name),
) )

View File

@ -5,8 +5,9 @@ from __future__ import annotations
from collections.abc import Mapping from collections.abc import Mapping
import logging import logging
import types import types
from typing import Any from typing import Any, Generic
from typing_extensions import TypeVar
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
@ -46,6 +47,8 @@ AUTH_PROVIDER_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA, extra=vol.ALLOW_EXTRA,
) )
_AuthProviderT = TypeVar("_AuthProviderT", bound="AuthProvider", default="AuthProvider")
class AuthProvider: class AuthProvider:
"""Provider of user authentication.""" """Provider of user authentication."""
@ -105,7 +108,7 @@ class AuthProvider:
# Implement by extending class # Implement by extending class
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow[Any]:
"""Return the data flow for logging in with auth provider. """Return the data flow for logging in with auth provider.
Auth provider should extend LoginFlow and return an instance. Auth provider should extend LoginFlow and return an instance.
@ -192,12 +195,15 @@ async def load_auth_provider_module(
return module return module
class LoginFlow(FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]]): class LoginFlow(
FlowHandler[AuthFlowContext, AuthFlowResult, tuple[str, str]],
Generic[_AuthProviderT],
):
"""Handler for the login flow.""" """Handler for the login flow."""
_flow_result = AuthFlowResult _flow_result = AuthFlowResult
def __init__(self, auth_provider: AuthProvider) -> None: def __init__(self, auth_provider: _AuthProviderT) -> None:
"""Initialize the login flow.""" """Initialize the login flow."""
self._auth_provider = auth_provider self._auth_provider = auth_provider
self._auth_module_id: str | None = None self._auth_module_id: str | None = None

View File

@ -6,7 +6,7 @@ import asyncio
from collections.abc import Mapping from collections.abc import Mapping
import logging import logging
import os import os
from typing import Any, cast from typing import Any
import voluptuous as vol import voluptuous as vol
@ -59,7 +59,9 @@ class CommandLineAuthProvider(AuthProvider):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self._user_meta: dict[str, dict[str, Any]] = {} self._user_meta: dict[str, dict[str, Any]] = {}
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: async def async_login_flow(
self, context: AuthFlowContext | None
) -> CommandLineLoginFlow:
"""Return a flow to login.""" """Return a flow to login."""
return CommandLineLoginFlow(self) return CommandLineLoginFlow(self)
@ -133,7 +135,7 @@ class CommandLineAuthProvider(AuthProvider):
) )
class CommandLineLoginFlow(LoginFlow): class CommandLineLoginFlow(LoginFlow[CommandLineAuthProvider]):
"""Handler for the login flow.""" """Handler for the login flow."""
async def async_step_init( async def async_step_init(
@ -145,9 +147,9 @@ class CommandLineLoginFlow(LoginFlow):
if user_input is not None: if user_input is not None:
user_input["username"] = user_input["username"].strip() user_input["username"] = user_input["username"].strip()
try: try:
await cast( await self._auth_provider.async_validate_login(
CommandLineAuthProvider, self._auth_provider user_input["username"], user_input["password"]
).async_validate_login(user_input["username"], user_input["password"]) )
except InvalidAuthError: except InvalidAuthError:
errors["base"] = "invalid_auth" errors["base"] = "invalid_auth"

View File

@ -305,7 +305,7 @@ class HassAuthProvider(AuthProvider):
await data.async_load() await data.async_load()
self.data = data self.data = data
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: async def async_login_flow(self, context: AuthFlowContext | None) -> HassLoginFlow:
"""Return a flow to login.""" """Return a flow to login."""
return HassLoginFlow(self) return HassLoginFlow(self)
@ -400,7 +400,7 @@ class HassAuthProvider(AuthProvider):
pass pass
class HassLoginFlow(LoginFlow): class HassLoginFlow(LoginFlow[HassAuthProvider]):
"""Handler for the login flow.""" """Handler for the login flow."""
async def async_step_init( async def async_step_init(
@ -411,7 +411,7 @@ class HassLoginFlow(LoginFlow):
if user_input is not None: if user_input is not None:
try: try:
await cast(HassAuthProvider, self._auth_provider).async_validate_login( await self._auth_provider.async_validate_login(
user_input["username"], user_input["password"] user_input["username"], user_input["password"]
) )
except InvalidAuth: except InvalidAuth:

View File

@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Mapping from collections.abc import Mapping
import hmac import hmac
from typing import cast
import voluptuous as vol import voluptuous as vol
@ -36,7 +35,9 @@ class InvalidAuthError(HomeAssistantError):
class ExampleAuthProvider(AuthProvider): class ExampleAuthProvider(AuthProvider):
"""Example auth provider based on hardcoded usernames and passwords.""" """Example auth provider based on hardcoded usernames and passwords."""
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: async def async_login_flow(
self, context: AuthFlowContext | None
) -> ExampleLoginFlow:
"""Return a flow to login.""" """Return a flow to login."""
return ExampleLoginFlow(self) return ExampleLoginFlow(self)
@ -93,7 +94,7 @@ class ExampleAuthProvider(AuthProvider):
return UserMeta(name=name, is_active=True) return UserMeta(name=name, is_active=True)
class ExampleLoginFlow(LoginFlow): class ExampleLoginFlow(LoginFlow[ExampleAuthProvider]):
"""Handler for the login flow.""" """Handler for the login flow."""
async def async_step_init( async def async_step_init(
@ -104,7 +105,7 @@ class ExampleLoginFlow(LoginFlow):
if user_input is not None: if user_input is not None:
try: try:
cast(ExampleAuthProvider, self._auth_provider).async_validate_login( self._auth_provider.async_validate_login(
user_input["username"], user_input["password"] user_input["username"], user_input["password"]
) )
except InvalidAuthError: except InvalidAuthError:

View File

@ -104,7 +104,9 @@ class TrustedNetworksAuthProvider(AuthProvider):
"""Trusted Networks auth provider does not support MFA.""" """Trusted Networks auth provider does not support MFA."""
return False return False
async def async_login_flow(self, context: AuthFlowContext | None) -> LoginFlow: async def async_login_flow(
self, context: AuthFlowContext | None
) -> TrustedNetworksLoginFlow:
"""Return a flow to login.""" """Return a flow to login."""
assert context is not None assert context is not None
ip_addr = cast(IPAddress, context.get("ip_address")) ip_addr = cast(IPAddress, context.get("ip_address"))
@ -214,7 +216,7 @@ class TrustedNetworksAuthProvider(AuthProvider):
self.async_validate_access(ip_address(remote_ip)) self.async_validate_access(ip_address(remote_ip))
class TrustedNetworksLoginFlow(LoginFlow): class TrustedNetworksLoginFlow(LoginFlow[TrustedNetworksAuthProvider]):
"""Handler for the login flow.""" """Handler for the login flow."""
def __init__( def __init__(
@ -235,9 +237,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
) -> AuthFlowResult: ) -> AuthFlowResult:
"""Handle the step of the form.""" """Handle the step of the form."""
try: try:
cast( self._auth_provider.async_validate_access(self._ip_address)
TrustedNetworksAuthProvider, self._auth_provider
).async_validate_access(self._ip_address)
except InvalidAuthError: except InvalidAuthError:
return self.async_abort(reason="not_allowed") return self.async_abort(reason="not_allowed")

View File

@ -1,6 +1,10 @@
"""Home Assistant module to handle restoring backups.""" """Home Assistant module to handle restoring backups."""
from __future__ import annotations
from collections.abc import Iterable
from dataclasses import dataclass from dataclasses import dataclass
import hashlib
import json import json
import logging import logging
from pathlib import Path from pathlib import Path
@ -14,7 +18,12 @@ import securetar
from .const import __version__ as HA_VERSION from .const import __version__ as HA_VERSION
RESTORE_BACKUP_FILE = ".HA_RESTORE" RESTORE_BACKUP_FILE = ".HA_RESTORE"
KEEP_PATHS = ("backups",) KEEP_BACKUPS = ("backups",)
KEEP_DATABASE = (
"home-assistant_v2.db",
"home-assistant_v2.db-wal",
)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -24,6 +33,21 @@ class RestoreBackupFileContent:
"""Definition for restore backup file content.""" """Definition for restore backup file content."""
backup_file_path: Path backup_file_path: Path
password: str | None
remove_after_restore: bool
restore_database: bool
restore_homeassistant: bool
def password_to_key(password: str) -> bytes:
"""Generate a AES Key from password.
Matches the implementation in supervisor.backups.utils.password_to_key.
"""
key: bytes = password.encode()
for _ in range(100):
key = hashlib.sha256(key).digest()
return key[:16]
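A minimal usage sketch of the helper above (the password string is illustrative): the encoded password is hashed through 100 rounds of SHA-256 and truncated to 16 bytes, i.e. an AES-128 key, which is what the inner tar is opened with further down.
key = password_to_key("hunter2")  # illustrative password
assert isinstance(key, bytes) and len(key) == 16  # AES-128 key size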
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None: def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
@ -32,20 +56,27 @@ def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent |
try: try:
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8")) instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
return RestoreBackupFileContent( return RestoreBackupFileContent(
backup_file_path=Path(instruction_content["path"]) backup_file_path=Path(instruction_content["path"]),
password=instruction_content["password"],
remove_after_restore=instruction_content["remove_after_restore"],
restore_database=instruction_content["restore_database"],
restore_homeassistant=instruction_content["restore_homeassistant"],
) )
except (FileNotFoundError, json.JSONDecodeError): except (FileNotFoundError, KeyError, json.JSONDecodeError):
return None return None
finally:
# Always remove the backup instruction file to prevent a boot loop
instruction_path.unlink(missing_ok=True)
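For reference, a sketch of the .HA_RESTORE instruction file these keys are read from; the key names come from the code above, while the paths and values are illustrative:
import json
from pathlib import Path

instruction = {
    "path": "/config/backups/abc123.tar",  # illustrative backup location
    "password": None,                      # or the backup's encryption password
    "remove_after_restore": False,
    "restore_database": True,
    "restore_homeassistant": True,
}
Path("/config/.HA_RESTORE").write_text(json.dumps(instruction), encoding="utf-8")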
def _clear_configuration_directory(config_dir: Path) -> None: def _clear_configuration_directory(config_dir: Path, keep: Iterable[str]) -> None:
"""Delete all files and directories in the config directory except for the backups directory.""" """Delete all files and directories in the config directory except entries in the keep list."""
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS] keep_paths = [config_dir.joinpath(path) for path in keep]
config_contents = sorted( entries_to_remove = sorted(
[entry for entry in config_dir.iterdir() if entry not in keep_paths] entry for entry in config_dir.iterdir() if entry not in keep_paths
) )
for entry in config_contents: for entry in entries_to_remove:
entrypath = config_dir.joinpath(entry) entrypath = config_dir.joinpath(entry)
if entrypath.is_file(): if entrypath.is_file():
@ -54,12 +85,15 @@ def _clear_configuration_directory(config_dir: Path) -> None:
shutil.rmtree(entrypath) shutil.rmtree(entrypath)
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: def _extract_backup(
config_dir: Path,
restore_content: RestoreBackupFileContent,
) -> None:
"""Extract the backup file to the config directory.""" """Extract the backup file to the config directory."""
with ( with (
TemporaryDirectory() as tempdir, TemporaryDirectory() as tempdir,
securetar.SecureTarFile( securetar.SecureTarFile(
backup_file_path, restore_content.backup_file_path,
gzip=False, gzip=False,
mode="r", mode="r",
) as ostf, ) as ostf,
@ -88,22 +122,41 @@ def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}", f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
), ),
gzip=backup_meta["compressed"], gzip=backup_meta["compressed"],
key=password_to_key(restore_content.password)
if restore_content.password is not None
else None,
mode="r", mode="r",
) as istf: ) as istf:
for member in istf.getmembers():
if member.name == "data":
continue
member.name = member.name.replace("data/", "")
_clear_configuration_directory(config_dir)
istf.extractall( istf.extractall(
path=config_dir, path=Path(tempdir, "homeassistant"),
members=[ members=securetar.secure_path(istf),
member
for member in securetar.secure_path(istf)
if member.name != "data"
],
filter="fully_trusted", filter="fully_trusted",
) )
if restore_content.restore_homeassistant:
keep = list(KEEP_BACKUPS)
if not restore_content.restore_database:
keep.extend(KEEP_DATABASE)
_clear_configuration_directory(config_dir, keep)
shutil.copytree(
Path(tempdir, "homeassistant", "data"),
config_dir,
dirs_exist_ok=True,
ignore=shutil.ignore_patterns(*(keep)),
)
elif restore_content.restore_database:
for entry in KEEP_DATABASE:
entrypath = config_dir / entry
if entrypath.is_file():
entrypath.unlink()
elif entrypath.is_dir():
shutil.rmtree(entrypath)
for entry in KEEP_DATABASE:
shutil.copy(
Path(tempdir, "homeassistant", "data", entry),
config_dir,
)
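Summarizing the branches above, the three restore modes behave as follows (a sketch; the file names come from KEEP_BACKUPS and KEEP_DATABASE):
# restore_homeassistant=True,  restore_database=True  -> keep only "backups/", replace everything else
# restore_homeassistant=True,  restore_database=False -> also keep home-assistant_v2.db and home-assistant_v2.db-wal
# restore_homeassistant=False, restore_database=True  -> leave the config dir alone and replace only the two database files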
def restore_backup(config_dir_path: str) -> bool: def restore_backup(config_dir_path: str) -> bool:
@ -119,8 +172,13 @@ def restore_backup(config_dir_path: str) -> bool:
backup_file_path = restore_content.backup_file_path backup_file_path = restore_content.backup_file_path
_LOGGER.info("Restoring %s", backup_file_path) _LOGGER.info("Restoring %s", backup_file_path)
try: try:
_extract_backup(config_dir, backup_file_path) _extract_backup(
config_dir=config_dir,
restore_content=restore_content,
)
except FileNotFoundError as err: except FileNotFoundError as err:
raise ValueError(f"Backup file {backup_file_path} does not exist") from err raise ValueError(f"Backup file {backup_file_path} does not exist") from err
if restore_content.remove_after_restore:
backup_file_path.unlink(missing_ok=True)
_LOGGER.info("Restore complete, restarting") _LOGGER.info("Restore complete, restarting")
return True return True

View File

@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
return False return False
def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
# If only cadata is passed, we can ignore it
kwargs = mapped_args.get("kwargs")
return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)
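A quick sketch of what the new check permits versus still reports (argument values are illustrative; mapped_args mirrors the dict the blocking-call checker passes in):
_check_load_verify_locations_call_allowed({"kwargs": {"cadata": "-----BEGIN CERTIFICATE-----..."}})  # True: in-memory PEM only
_check_load_verify_locations_call_allowed({"kwargs": {"cafile": "/etc/ssl/certs/ca.pem"}})           # False: hits the filesystem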
@dataclass(slots=True, frozen=True) @dataclass(slots=True, frozen=True)
class BlockingCall: class BlockingCall:
"""Class to hold information about a blocking call.""" """Class to hold information about a blocking call."""
@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
original_func=SSLContext.load_verify_locations, original_func=SSLContext.load_verify_locations,
object=SSLContext, object=SSLContext,
function="load_verify_locations", function="load_verify_locations",
check_allowed=None, check_allowed=_check_load_verify_locations_call_allowed,
strict=False, strict=False,
strict_core=False, strict_core=False,
skip_for_tests=True, skip_for_tests=True,

View File

@ -89,7 +89,7 @@ from .helpers import (
) )
from .helpers.dispatcher import async_dispatcher_send_internal from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info, is_official_image from .helpers.system_info import async_get_system_info
from .helpers.typing import ConfigType from .helpers.typing import ConfigType
from .setup import ( from .setup import (
# _setup_started is marked as protected to make it clear # _setup_started is marked as protected to make it clear
@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
from .util.hass_dict import HassKey from .util.hass_dict import HassKey
from .util.logging import async_activate_log_queue_handler from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_docker_env, is_virtual_env from .util.package import async_get_user_site, is_docker_env, is_virtual_env
from .util.system_info import is_official_image
with contextlib.suppress(ImportError): with contextlib.suppress(ImportError):
# Ensure anyio backend is imported to avoid it being imported in the event loop # Ensure anyio backend is imported to avoid it being imported in the event loop
@ -252,6 +253,7 @@ PRELOAD_STORAGE = [
"assist_pipeline.pipelines", "assist_pipeline.pipelines",
"core.analytics", "core.analytics",
"auth_module.totp", "auth_module.totp",
"backup",
] ]
@ -515,7 +517,7 @@ async def async_from_config_dict(
issue_registry.async_create_issue( issue_registry.async_create_issue(
hass, hass,
core.DOMAIN, core.DOMAIN,
"python_version", f"python_version_{required_python_version}",
is_fixable=False, is_fixable=False,
severity=issue_registry.IssueSeverity.WARNING, severity=issue_registry.IssueSeverity.WARNING,
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE, breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,

View File

@ -2,6 +2,7 @@
"domain": "microsoft", "domain": "microsoft",
"name": "Microsoft", "name": "Microsoft",
"integrations": [ "integrations": [
"azure_data_explorer",
"azure_devops", "azure_devops",
"azure_event_hub", "azure_event_hub",
"azure_service_bus", "azure_service_bus",

View File

@ -0,0 +1,5 @@
{
"domain": "sky",
"name": "Sky",
"integrations": ["sky_hub", "sky_remote"]
}

View File

@ -0,0 +1,5 @@
{
"domain": "slide",
"name": "Slide",
"integrations": ["slide", "slide_local"]
}

View File

@ -112,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult: ) -> ConfigFlowResult:
"""Handle a flow initialized by the user.""" """Handle a flow initialized by the user."""
if self._async_current_entries():
return self.async_abort(reason="single_instance_allowed")
if user_input is None: if user_input is None:
return self.async_show_form( return self.async_show_form(
step_id="user", data_schema=vol.Schema(self.data_schema) step_id="user", data_schema=vol.Schema(self.data_schema)

View File

@ -9,18 +9,16 @@ from jaraco.abode.devices.light import Light
from homeassistant.components.light import ( from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN,
ATTR_HS_COLOR, ATTR_HS_COLOR,
DEFAULT_MAX_KELVIN,
DEFAULT_MIN_KELVIN,
ColorMode, ColorMode,
LightEntity, LightEntity,
) )
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util.color import (
color_temperature_kelvin_to_mired,
color_temperature_mired_to_kelvin,
)
from . import AbodeSystem from . import AbodeSystem
from .const import DOMAIN from .const import DOMAIN
@ -44,13 +42,13 @@ class AbodeLight(AbodeDevice, LightEntity):
_device: Light _device: Light
_attr_name = None _attr_name = None
_attr_max_color_temp_kelvin = DEFAULT_MAX_KELVIN
_attr_min_color_temp_kelvin = DEFAULT_MIN_KELVIN
def turn_on(self, **kwargs: Any) -> None: def turn_on(self, **kwargs: Any) -> None:
"""Turn on the light.""" """Turn on the light."""
if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable: if ATTR_COLOR_TEMP_KELVIN in kwargs and self._device.is_color_capable:
self._device.set_color_temp( self._device.set_color_temp(kwargs[ATTR_COLOR_TEMP_KELVIN])
int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]))
)
return return
if ATTR_HS_COLOR in kwargs and self._device.is_color_capable: if ATTR_HS_COLOR in kwargs and self._device.is_color_capable:
@ -85,10 +83,10 @@ class AbodeLight(AbodeDevice, LightEntity):
return None return None
@property @property
def color_temp(self) -> int | None: def color_temp_kelvin(self) -> int | None:
"""Return the color temp of the light.""" """Return the color temp of the light."""
if self._device.has_color: if self._device.has_color:
return color_temperature_kelvin_to_mired(self._device.color_temp) return int(self._device.color_temp)
return None return None
@property @property
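For context, the dropped util.color helpers implement the reciprocal mired/kelvin relationship; since the entity now reports and accepts kelvin natively, no conversion is needed. Approximate values for illustration:
# mired = 1_000_000 / kelvin (and vice versa)
color_temperature_kelvin_to_mired(2700)  # ~370 mired
color_temperature_mired_to_kelvin(370)   # ~2702 K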

View File

@ -9,5 +9,6 @@
}, },
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["jaraco.abode", "lomond"], "loggers": ["jaraco.abode", "lomond"],
"requirements": ["jaraco.abode==6.2.1"] "requirements": ["jaraco.abode==6.2.1"],
"single_config_entry": true
} }

View File

@ -28,24 +28,23 @@
"invalid_mfa_code": "Invalid MFA code" "invalid_mfa_code": "Invalid MFA code"
}, },
"abort": { "abort": {
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
} }
}, },
"services": { "services": {
"capture_image": { "capture_image": {
"name": "Capture image", "name": "Capture image",
"description": "Request a new image capture from a camera device.", "description": "Requests a new image capture from a camera device.",
"fields": { "fields": {
"entity_id": { "entity_id": {
"name": "Entity", "name": "Entity",
"description": "Entity id of the camera to request an image." "description": "Entity ID of the camera to request an image from."
} }
} }
}, },
"change_setting": { "change_setting": {
"name": "Change setting", "name": "Change setting",
"description": "Change an Abode system setting.", "description": "Changes an Abode system setting.",
"fields": { "fields": {
"setting": { "setting": {
"name": "Setting", "name": "Setting",
@ -59,11 +58,11 @@
}, },
"trigger_automation": { "trigger_automation": {
"name": "Trigger automation", "name": "Trigger automation",
"description": "Trigger an Abode automation.", "description": "Triggers an Abode automation.",
"fields": { "fields": {
"entity_id": { "entity_id": {
"name": "Entity", "name": "Entity",
"description": "Entity id of the automation to trigger." "description": "Entity ID of the automation to trigger."
} }
} }
} }

View File

@ -0,0 +1,31 @@
"""Initialize the Acaia component."""
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.SENSOR,
]
async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
"""Set up acaia as config entry."""
coordinator = AcaiaCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@ -0,0 +1,61 @@
"""Binary sensor platform for Acaia scales."""
from collections.abc import Callable
from dataclasses import dataclass
from aioacaia.acaiascale import AcaiaScale
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(kw_only=True, frozen=True)
class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Description for Acaia binary sensor entities."""
is_on_fn: Callable[[AcaiaScale], bool]
BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = (
AcaiaBinarySensorEntityDescription(
key="timer_running",
translation_key="timer_running",
device_class=BinarySensorDeviceClass.RUNNING,
is_on_fn=lambda scale: scale.timer_running,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up binary sensors."""
coordinator = entry.runtime_data
async_add_entities(
AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS
)
class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity):
"""Representation of an Acaia binary sensor."""
entity_description: AcaiaBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.entity_description.is_on_fn(self._scale)

View File

@ -0,0 +1,63 @@
"""Button entities for Acaia scales."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any
from aioacaia.acaiascale import AcaiaScale
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
PARALLEL_UPDATES = 0
@dataclass(kw_only=True, frozen=True)
class AcaiaButtonEntityDescription(ButtonEntityDescription):
"""Description for acaia button entities."""
press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
AcaiaButtonEntityDescription(
key="tare",
translation_key="tare",
press_fn=lambda scale: scale.tare(),
),
AcaiaButtonEntityDescription(
key="reset_timer",
translation_key="reset_timer",
press_fn=lambda scale: scale.reset_timer(),
),
AcaiaButtonEntityDescription(
key="start_stop",
translation_key="start_stop",
press_fn=lambda scale: scale.start_stop_timer(),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up button entities and services."""
coordinator = entry.runtime_data
async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
class AcaiaButton(AcaiaEntity, ButtonEntity):
"""Representation of an Acaia button."""
entity_description: AcaiaButtonEntityDescription
async def async_press(self) -> None:
"""Handle the button press."""
await self.entity_description.press_fn(self._scale)

View File

@ -0,0 +1,149 @@
"""Config flow for Acaia integration."""
import logging
from typing import Any
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
from aioacaia.helpers import is_new_scale
import voluptuous as vol
from homeassistant.components.bluetooth import (
BluetoothServiceInfoBleak,
async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_NAME
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
_LOGGER = logging.getLogger(__name__)
class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for acaia."""
def __init__(self) -> None:
"""Initialize the config flow."""
self._discovered: dict[str, Any] = {}
self._discovered_devices: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
errors: dict[str, str] = {}
if user_input is not None:
mac = user_input[CONF_ADDRESS]
try:
is_new_style_scale = await is_new_scale(mac)
except AcaiaDeviceNotFound:
errors["base"] = "device_not_found"
except AcaiaError:
_LOGGER.exception("Error occurred while connecting to the scale")
errors["base"] = "unknown"
except AcaiaUnknownDevice:
return self.async_abort(reason="unsupported_device")
else:
await self.async_set_unique_id(format_mac(mac))
self._abort_if_unique_id_configured()
if not errors:
return self.async_create_entry(
title=self._discovered_devices[mac],
data={
CONF_ADDRESS: mac,
CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
},
)
for device in async_discovered_service_info(self.hass):
self._discovered_devices[device.address] = device.name
if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")
options = [
SelectOptionDict(
value=device_mac,
label=f"{device_name} ({device_mac})",
)
for device_mac, device_name in self._discovered_devices.items()
]
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_ADDRESS): SelectSelector(
SelectSelectorConfig(
options=options,
mode=SelectSelectorMode.DROPDOWN,
)
)
}
),
errors=errors,
)
async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfoBleak
) -> ConfigFlowResult:
"""Handle a discovered Bluetooth device."""
self._discovered[CONF_ADDRESS] = discovery_info.address
self._discovered[CONF_NAME] = discovery_info.name
await self.async_set_unique_id(format_mac(discovery_info.address))
self._abort_if_unique_id_configured()
try:
self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
discovery_info.address
)
except AcaiaDeviceNotFound:
_LOGGER.debug("Device not found during discovery")
return self.async_abort(reason="device_not_found")
except AcaiaError:
_LOGGER.debug(
"Error occurred while connecting to the scale during discovery",
exc_info=True,
)
return self.async_abort(reason="unknown")
except AcaiaUnknownDevice:
_LOGGER.debug("Unsupported device during discovery")
return self.async_abort(reason="unsupported_device")
return await self.async_step_bluetooth_confirm()
async def async_step_bluetooth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle confirmation of Bluetooth discovery."""
if user_input is not None:
return self.async_create_entry(
title=self._discovered[CONF_NAME],
data={
CONF_ADDRESS: self._discovered[CONF_ADDRESS],
CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
},
)
self.context["title_placeholders"] = placeholders = {
CONF_NAME: self._discovered[CONF_NAME]
}
self._set_confirm_only()
return self.async_show_form(
step_id="bluetooth_confirm",
description_placeholders=placeholders,
)

View File

@ -0,0 +1,4 @@
"""Constants for component."""
DOMAIN = "acaia"
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"

View File

@ -0,0 +1,86 @@
"""Coordinator for Acaia integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from aioacaia.acaiascale import AcaiaScale
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_IS_NEW_STYLE_SCALE
SCAN_INTERVAL = timedelta(seconds=15)
_LOGGER = logging.getLogger(__name__)
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
class AcaiaCoordinator(DataUpdateCoordinator[None]):
"""Class to handle fetching data from the scale."""
config_entry: AcaiaConfigEntry
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
name="acaia coordinator",
update_interval=SCAN_INTERVAL,
config_entry=entry,
)
self._scale = AcaiaScale(
address_or_ble_device=entry.data[CONF_ADDRESS],
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=self.async_update_listeners,
)
@property
def scale(self) -> AcaiaScale:
"""Return the scale object."""
return self._scale
async def _async_update_data(self) -> None:
"""Fetch data."""
# scale is already connected, return
if self._scale.connected:
return
# scale is not connected, try to connect
try:
await self._scale.connect(setup_tasks=False)
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
_LOGGER.debug(
"Could not connect to scale: %s, Error: %s",
self.config_entry.data[CONF_ADDRESS],
ex,
)
self._scale.device_disconnected_handler(notify=False)
return
# connected, set up background tasks
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
hass=self.hass,
target=self._scale.send_heartbeats(),
name="acaia_heartbeat_task",
)
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
self._scale.process_queue_task = (
self.config_entry.async_create_background_task(
hass=self.hass,
target=self._scale.process_queue(),
name="acaia_process_queue_task",
)
)

View File

@ -0,0 +1,31 @@
"""Diagnostics support for Acaia."""
from __future__ import annotations
from dataclasses import asdict
from typing import Any
from homeassistant.core import HomeAssistant
from . import AcaiaConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
scale = coordinator.scale
# collect all data sources
return {
"model": scale.model,
"device_state": (
asdict(scale.device_state) if scale.device_state is not None else ""
),
"mac": scale.mac,
"last_disconnect_time": scale.last_disconnect_time,
"timer": scale.timer,
"weight": scale.weight,
}

View File

@ -0,0 +1,46 @@
"""Base class for Acaia entities."""
from dataclasses import dataclass
from homeassistant.helpers.device_registry import (
CONNECTION_BLUETOOTH,
DeviceInfo,
format_mac,
)
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AcaiaCoordinator
@dataclass
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
"""Common elements for all entities."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: AcaiaCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._scale = coordinator.scale
formatted_mac = format_mac(self._scale.mac)
self._attr_unique_id = f"{formatted_mac}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, formatted_mac)},
manufacturer="Acaia",
model=self._scale.model,
suggested_area="Kitchen",
connections={(CONNECTION_BLUETOOTH, self._scale.mac)},
)
@property
def available(self) -> bool:
"""Returns whether entity is available."""
return super().available and self._scale.connected

View File

@ -0,0 +1,24 @@
{
"entity": {
"binary_sensor": {
"timer_running": {
"default": "mdi:timer",
"state": {
"on": "mdi:timer-play",
"off": "mdi:timer-off"
}
}
},
"button": {
"tare": {
"default": "mdi:scale-balance"
},
"reset_timer": {
"default": "mdi:timer-refresh"
},
"start_stop": {
"default": "mdi:timer-play"
}
}
}
}

View File

@ -0,0 +1,30 @@
{
"domain": "acaia",
"name": "Acaia",
"bluetooth": [
{
"manufacturer_id": 16962
},
{
"local_name": "ACAIA*"
},
{
"local_name": "PYXIS-*"
},
{
"local_name": "LUNAR-*"
},
{
"local_name": "PROCHBT001"
}
],
"codeowners": ["@zweckj"],
"config_flow": true,
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/acaia",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aioacaia"],
"quality_scale": "platinum",
"requirements": ["aioacaia==0.1.13"]
}

View File

@ -0,0 +1,106 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: |
No explicit event subscriptions.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup:
status: exempt
comment: |
Device is expected to be offline most of the time, but needs to connect quickly once available.
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable:
status: done
comment: |
Handled by coordinator.
parallel-updates: done
reauthentication-flow:
status: exempt
comment: |
No authentication required.
test-coverage: done
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: |
No IP discovery.
discovery:
status: done
comment: |
Bluetooth discovery.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: |
Device type integration.
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
No noisy/non-essential entities.
entity-translations: done
exception-translations:
status: exempt
comment: |
No custom exceptions.
icon-translations: done
reconfiguration-flow:
status: exempt
comment: |
The only parameter that could change (the MAC address, used as the unique_id) would force a new config entry.
repair-issues:
status: exempt
comment: |
No repairs/issues.
stale-devices:
status: exempt
comment: |
Device type integration.
# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: |
Bluetooth connection.
strict-typing: done

View File

@ -0,0 +1,146 @@
"""Sensor platform for Acaia."""
from collections.abc import Callable
from dataclasses import dataclass
from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale
from aioacaia.const import UnitMass as AcaiaUnitOfMass
from homeassistant.components.sensor import (
RestoreSensor,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorExtraStoredData,
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AcaiaConfigEntry
from .entity import AcaiaEntity
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@dataclass(kw_only=True, frozen=True)
class AcaiaSensorEntityDescription(SensorEntityDescription):
"""Description for Acaia sensor entities."""
value_fn: Callable[[AcaiaScale], int | float | None]
@dataclass(kw_only=True, frozen=True)
class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription):
"""Description for Acaia sensor entities with dynamic units."""
unit_fn: Callable[[AcaiaDeviceState], str] | None = None
SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
AcaiaDynamicUnitSensorEntityDescription(
key="weight",
device_class=SensorDeviceClass.WEIGHT,
native_unit_of_measurement=UnitOfMass.GRAMS,
state_class=SensorStateClass.MEASUREMENT,
unit_fn=lambda data: (
UnitOfMass.OUNCES
if data.units == AcaiaUnitOfMass.OUNCES
else UnitOfMass.GRAMS
),
value_fn=lambda scale: scale.weight,
),
AcaiaDynamicUnitSensorEntityDescription(
key="flow_rate",
device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND,
suggested_display_precision=1,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda scale: scale.flow_rate,
),
)
RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] = (
AcaiaSensorEntityDescription(
key="battery",
device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda scale: (
scale.device_state.battery_level if scale.device_state else None
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AcaiaConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up sensors."""
coordinator = entry.runtime_data
entities: list[SensorEntity] = [
AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS
]
entities.extend(
AcaiaRestoreSensor(coordinator, entity_description)
for entity_description in RESTORE_SENSORS
)
async_add_entities(entities)
class AcaiaSensor(AcaiaEntity, SensorEntity):
"""Representation of an Acaia sensor."""
entity_description: AcaiaDynamicUnitSensorEntityDescription
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement of this entity."""
if (
self._scale.device_state is not None
and self.entity_description.unit_fn is not None
):
return self.entity_description.unit_fn(self._scale.device_state)
return self.entity_description.native_unit_of_measurement
@property
def native_value(self) -> int | float | None:
"""Return the state of the entity."""
return self.entity_description.value_fn(self._scale)
class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor):
"""Representation of an Acaia sensor with restore capabilities."""
entity_description: AcaiaSensorEntityDescription
_restored_data: SensorExtraStoredData | None = None
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
self._restored_data = await self.async_get_last_sensor_data()
if self._restored_data is not None:
self._attr_native_value = self._restored_data.native_value
self._attr_native_unit_of_measurement = (
self._restored_data.native_unit_of_measurement
)
if self._scale.device_state is not None:
self._attr_native_value = self.entity_description.value_fn(self._scale)
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self._scale.device_state is not None:
self._attr_native_value = self.entity_description.value_fn(self._scale)
self._async_write_ha_state()
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available or self._restored_data is not None

View File

@ -0,0 +1,46 @@
{
"config": {
"flow_title": "{name}",
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"unsupported_device": "This device is not supported."
},
"error": {
"device_not_found": "Device could not be found.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"bluetooth_confirm": {
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
},
"user": {
"description": "[%key:component::bluetooth::config::step::user::description%]",
"data": {
"address": "[%key:common::config_flow::data::device%]"
},
"data_description": {
"address": "Select Acaia scale you want to set up"
}
}
}
},
"entity": {
"binary_sensor": {
"timer_running": {
"name": "Timer running"
}
},
"button": {
"tare": {
"name": "Tare"
},
"reset_timer": {
"name": "Reset timer"
},
"start_stop": {
"name": "Start/stop timer"
}
}
}
}

View File

@ -7,7 +7,6 @@
"integration_type": "service", "integration_type": "service",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["accuweather"], "loggers": ["accuweather"],
"quality_scale": "platinum", "requirements": ["accuweather==4.0.0"],
"requirements": ["accuweather==3.0.0"],
"single_config_entry": true "single_config_entry": true
} }

View File

@ -4,5 +4,6 @@
"codeowners": [], "codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/acer_projector", "documentation": "https://www.home-assistant.io/integrations/acer_projector",
"iot_class": "local_polling", "iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["pyserial==3.5"] "requirements": ["pyserial==3.5"]
} }

View File

@ -3,5 +3,6 @@
"name": "Actiontec", "name": "Actiontec",
"codeowners": [], "codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/actiontec", "documentation": "https://www.home-assistant.io/integrations/actiontec",
"iot_class": "local_polling" "iot_class": "local_polling",
"quality_scale": "legacy"
} }

View File

@ -75,7 +75,6 @@ class AdaxDevice(ClimateEntity):
) )
_attr_target_temperature_step = PRECISION_WHOLE _attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_temperature_unit = UnitOfTemperature.CELSIUS
_enable_turn_on_off_backwards_compatibility = False
def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None: def __init__(self, heater_data: dict[str, Any], adax_data_handler: Adax) -> None:
"""Initialize the heater.""" """Initialize the heater."""

View File

@ -37,7 +37,7 @@ STATE_KEY_POSITION = "position"
PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend(
{ {
vol.Optional(CONF_ADS_VAR): cv.string, vol.Required(CONF_ADS_VAR): cv.string,
vol.Optional(CONF_ADS_VAR_POSITION): cv.string, vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string, vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string, vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/ads", "documentation": "https://www.home-assistant.io/integrations/ads",
"iot_class": "local_push", "iot_class": "local_push",
"loggers": ["pyads"], "loggers": ["pyads"],
"quality_scale": "legacy",
"requirements": ["pyads==3.4.0"] "requirements": ["pyads==3.4.0"]
} }

View File

@ -102,7 +102,6 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity):
_attr_max_temp = 32 _attr_max_temp = 32
_attr_min_temp = 16 _attr_min_temp = 16
_attr_name = None _attr_name = None
_enable_turn_on_off_backwards_compatibility = False
_support_preset = ClimateEntityFeature(0) _support_preset = ClimateEntityFeature(0)
def __init__(self, instance: AdvantageAirData, ac_key: str) -> None: def __init__(self, instance: AdvantageAirData, ac_key: str) -> None:
@ -261,7 +260,6 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity):
_attr_target_temperature_step = PRECISION_WHOLE _attr_target_temperature_step = PRECISION_WHOLE
_attr_max_temp = 32 _attr_max_temp = 32
_attr_min_temp = 16 _attr_min_temp = 16
_enable_turn_on_off_backwards_compatibility = False
def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None: def __init__(self, instance: AdvantageAirData, ac_key: str, zone_key: str) -> None:
"""Initialize an AdvantageAir Zone control.""" """Initialize an AdvantageAir Zone control."""

View File

@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/advantage_air", "documentation": "https://www.home-assistant.io/integrations/advantage_air",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["advantage_air"], "loggers": ["advantage_air"],
"quality_scale": "platinum",
"requirements": ["advantage-air==0.4.4"] "requirements": ["advantage-air==0.4.4"]
} }

View File

@ -1,17 +1,19 @@
"""The AEMET OpenData component.""" """The AEMET OpenData component."""
import logging import logging
import shutil
from aemet_opendata.exceptions import AemetError, TownNotFound from aemet_opendata.exceptions import AemetError, TownNotFound
from aemet_opendata.interface import AEMET, ConnectionOptions from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.storage import STORAGE_DIR
from .const import CONF_STATION_UPDATES, PLATFORMS from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DOMAIN, PLATFORMS
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -23,10 +25,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
api_key = entry.data[CONF_API_KEY] api_key = entry.data[CONF_API_KEY]
latitude = entry.data[CONF_LATITUDE] latitude = entry.data[CONF_LATITUDE]
longitude = entry.data[CONF_LONGITUDE] longitude = entry.data[CONF_LONGITUDE]
station_updates = entry.options.get(CONF_STATION_UPDATES, True) update_features: int = UpdateFeature.FORECAST
if entry.options.get(CONF_RADAR_UPDATES, False):
update_features |= UpdateFeature.RADAR
if entry.options.get(CONF_STATION_UPDATES, True):
update_features |= UpdateFeature.STATION
options = ConnectionOptions(api_key, station_updates) options = ConnectionOptions(api_key, update_features)
aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options) aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options)
aemet.set_api_data_dir(hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"))
try: try:
await aemet.select_coordinates(latitude, longitude) await aemet.select_coordinates(latitude, longitude)
except TownNotFound as err: except TownNotFound as err:
@ -55,3 +63,11 @@ async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Remove a config entry."""
await hass.async_add_executor_job(
shutil.rmtree,
hass.config.path(STORAGE_DIR, f"{DOMAIN}-{entry.unique_id}"),
)
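
A minimal sketch (not part of this change) of how the entry options read in async_setup_entry above combine into the bitwise update_features value; the helper name and the plain options dict are illustrative only:

from aemet_opendata.interface import UpdateFeature

def build_update_features(options: dict) -> int:
    """Map config entry options onto the UpdateFeature bit flags passed to ConnectionOptions."""
    features: int = UpdateFeature.FORECAST  # forecasts are always fetched
    if options.get("radar_updates", False):  # CONF_RADAR_UPDATES, off by default
        features |= UpdateFeature.RADAR
    if options.get("station_updates", True):  # CONF_STATION_UPDATES, on by default
        features |= UpdateFeature.STATION
    return features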

View File

@ -17,10 +17,11 @@ from homeassistant.helpers.schema_config_entry_flow import (
SchemaOptionsFlowHandler, SchemaOptionsFlowHandler,
) )
from .const import CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN from .const import CONF_RADAR_UPDATES, CONF_STATION_UPDATES, DEFAULT_NAME, DOMAIN
OPTIONS_SCHEMA = vol.Schema( OPTIONS_SCHEMA = vol.Schema(
{ {
vol.Required(CONF_RADAR_UPDATES, default=False): bool,
vol.Required(CONF_STATION_UPDATES, default=True): bool, vol.Required(CONF_STATION_UPDATES, default=True): bool,
} }
) )
@ -45,7 +46,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(f"{latitude}-{longitude}") await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured() self._abort_if_unique_id_configured()
options = ConnectionOptions(user_input[CONF_API_KEY], False) options = ConnectionOptions(user_input[CONF_API_KEY])
aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options) aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options)
try: try:
await aemet.select_coordinates(latitude, longitude) await aemet.select_coordinates(latitude, longitude)

View File

@ -51,8 +51,9 @@ from homeassistant.components.weather import (
from homeassistant.const import Platform from homeassistant.const import Platform
ATTRIBUTION = "Powered by AEMET OpenData" ATTRIBUTION = "Powered by AEMET OpenData"
CONF_RADAR_UPDATES = "radar_updates"
CONF_STATION_UPDATES = "station_updates" CONF_STATION_UPDATES = "station_updates"
PLATFORMS = [Platform.SENSOR, Platform.WEATHER] PLATFORMS = [Platform.IMAGE, Platform.SENSOR, Platform.WEATHER]
DEFAULT_NAME = "AEMET" DEFAULT_NAME = "AEMET"
DOMAIN = "aemet" DOMAIN = "aemet"

View File

@ -4,7 +4,7 @@ from __future__ import annotations
from typing import Any from typing import Any
from aemet_opendata.const import AOD_COORDS from aemet_opendata.const import AOD_COORDS, AOD_IMG_BYTES
from homeassistant.components.diagnostics import async_redact_data from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import ( from homeassistant.const import (
@ -26,6 +26,7 @@ TO_REDACT_CONFIG = [
TO_REDACT_COORD = [ TO_REDACT_COORD = [
AOD_COORDS, AOD_COORDS,
AOD_IMG_BYTES,
] ]

View File

@ -0,0 +1,86 @@
"""Support for the AEMET OpenData images."""
from __future__ import annotations
from typing import Final
from aemet_opendata.const import AOD_DATETIME, AOD_IMG_BYTES, AOD_IMG_TYPE, AOD_RADAR
from aemet_opendata.helpers import dict_nested_value
from homeassistant.components.image import Image, ImageEntity, ImageEntityDescription
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity
AEMET_IMAGES: Final[tuple[ImageEntityDescription, ...]] = (
ImageEntityDescription(
key=AOD_RADAR,
translation_key="weather_radar",
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: AemetConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up AEMET OpenData image entities based on a config entry."""
domain_data = config_entry.runtime_data
name = domain_data.name
coordinator = domain_data.coordinator
unique_id = config_entry.unique_id
assert unique_id is not None
async_add_entities(
AemetImage(
hass,
name,
coordinator,
description,
unique_id,
)
for description in AEMET_IMAGES
if dict_nested_value(coordinator.data["lib"], [description.key]) is not None
)
class AemetImage(AemetEntity, ImageEntity):
"""Implementation of an AEMET OpenData image."""
entity_description: ImageEntityDescription
def __init__(
self,
hass: HomeAssistant,
name: str,
coordinator: WeatherUpdateCoordinator,
description: ImageEntityDescription,
unique_id: str,
) -> None:
"""Initialize the image."""
super().__init__(coordinator, name, unique_id)
ImageEntity.__init__(self, hass)
self.entity_description = description
self._attr_unique_id = f"{unique_id}-{description.key}"
self._async_update_attrs()
@callback
def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates."""
self._async_update_attrs()
super()._handle_coordinator_update()
@callback
def _async_update_attrs(self) -> None:
"""Update image attributes."""
image_data = self.get_aemet_value([self.entity_description.key])
self._cached_image = Image(
content_type=image_data.get(AOD_IMG_TYPE),
content=image_data.get(AOD_IMG_BYTES),
)
self._attr_image_last_updated = image_data.get(AOD_DATETIME)

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/aemet", "documentation": "https://www.home-assistant.io/integrations/aemet",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["aemet_opendata"], "loggers": ["aemet_opendata"],
"requirements": ["AEMET-OpenData==0.5.4"] "requirements": ["AEMET-OpenData==0.6.4"]
} }

View File

@ -18,10 +18,18 @@
} }
} }
}, },
"entity": {
"image": {
"weather_radar": {
"name": "Weather radar"
}
}
},
"options": { "options": {
"step": { "step": {
"init": { "init": {
"data": { "data": {
"radar_updates": "Gather data from AEMET weather radar",
"station_updates": "Gather data from AEMET weather stations" "station_updates": "Gather data from AEMET weather stations"
} }
} }

View File

@ -0,0 +1,86 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
This integration does not provide additional actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
This integration does not provide additional actions.
docs-high-level-description: todo
docs-installation-instructions: todo
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: |
Entities of this integration do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No options to configure
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow:
status: exempt
comment: |
This integration does not require authentication.
test-coverage: todo
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: todo
comment: DHCP is still possible
discovery:
status: todo
comment: DHCP is still possible
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: |
This integration has a fixed single device.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
This integration doesn't have any cases where raising an issue is needed.
stale-devices:
status: exempt
comment: |
This integration has a fixed single device.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@ -7,6 +7,5 @@
"integration_type": "service", "integration_type": "service",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["airly"], "loggers": ["airly"],
"quality_scale": "platinum",
"requirements": ["airly==1.1.0"] "requirements": ["airly==1.1.0"]
} }

View File

@ -7,5 +7,5 @@
"integration_type": "hub", "integration_type": "hub",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["aioairq"], "loggers": ["aioairq"],
"requirements": ["aioairq==0.3.2"] "requirements": ["aioairq==0.4.3"]
} }

View File

@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
key="temp", key="temp",
device_class=SensorDeviceClass.TEMPERATURE, device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS, native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
), ),
"humidity": SensorEntityDescription( "humidity": SensorEntityDescription(
key="humidity", key="humidity",
device_class=SensorDeviceClass.HUMIDITY, device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE, native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
), ),
"pressure": SensorEntityDescription( "pressure": SensorEntityDescription(
key="pressure", key="pressure",
device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
native_unit_of_measurement=UnitOfPressure.MBAR, native_unit_of_measurement=UnitOfPressure.MBAR,
state_class=SensorStateClass.MEASUREMENT,
), ),
"battery": SensorEntityDescription( "battery": SensorEntityDescription(
key="battery", key="battery",
device_class=SensorDeviceClass.BATTERY, device_class=SensorDeviceClass.BATTERY,
native_unit_of_measurement=PERCENTAGE, native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC, entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
), ),
"co2": SensorEntityDescription( "co2": SensorEntityDescription(
key="co2", key="co2",
device_class=SensorDeviceClass.CO2, device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
), ),
"voc": SensorEntityDescription( "voc": SensorEntityDescription(
key="voc", key="voc",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS, device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION, native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
), ),
"light": SensorEntityDescription( "light": SensorEntityDescription(
key="light", key="light",
native_unit_of_measurement=PERCENTAGE, native_unit_of_measurement=PERCENTAGE,
translation_key="light", translation_key="light",
state_class=SensorStateClass.MEASUREMENT,
), ),
"virusRisk": SensorEntityDescription( "virusRisk": SensorEntityDescription(
key="virusRisk", key="virusRisk",
translation_key="virus_risk", translation_key="virus_risk",
state_class=SensorStateClass.MEASUREMENT,
), ),
"mold": SensorEntityDescription( "mold": SensorEntityDescription(
key="mold", key="mold",
translation_key="mold", translation_key="mold",
state_class=SensorStateClass.MEASUREMENT,
), ),
"rssi": SensorEntityDescription( "rssi": SensorEntityDescription(
key="rssi", key="rssi",
@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
device_class=SensorDeviceClass.SIGNAL_STRENGTH, device_class=SensorDeviceClass.SIGNAL_STRENGTH,
entity_registry_enabled_default=False, entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC, entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
), ),
"pm1": SensorEntityDescription( "pm1": SensorEntityDescription(
key="pm1", key="pm1",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM1, device_class=SensorDeviceClass.PM1,
state_class=SensorStateClass.MEASUREMENT,
), ),
"pm25": SensorEntityDescription( "pm25": SensorEntityDescription(
key="pm25", key="pm25",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM25, device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
), ),
} }

View File

@ -95,7 +95,6 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
| ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_ON
) )
_attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_temperature_unit = UnitOfTemperature.CELSIUS
_enable_turn_on_off_backwards_compatibility = False
def __init__(self, coordinator, ac_number, info): def __init__(self, coordinator, ac_number, info):
"""Initialize the climate device.""" """Initialize the climate device."""
@ -205,7 +204,6 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
) )
_attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_hvac_modes = AT_GROUP_MODES _attr_hvac_modes = AT_GROUP_MODES
_enable_turn_on_off_backwards_compatibility = False
def __init__(self, coordinator, group_number, info): def __init__(self, coordinator, group_number, info):
"""Initialize the climate device.""" """Initialize the climate device."""

View File

@ -124,7 +124,6 @@ class Airtouch5ClimateEntity(ClimateEntity, Airtouch5Entity):
_attr_translation_key = DOMAIN _attr_translation_key = DOMAIN
_attr_target_temperature_step = 1 _attr_target_temperature_step = 1
_attr_name = None _attr_name = None
_enable_turn_on_off_backwards_compatibility = False
class Airtouch5AC(Airtouch5ClimateEntity): class Airtouch5AC(Airtouch5ClimateEntity):

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airtouch5", "documentation": "https://www.home-assistant.io/integrations/airtouch5",
"iot_class": "local_push", "iot_class": "local_push",
"loggers": ["airtouch5py"], "loggers": ["airtouch5py"],
"requirements": ["airtouch5py==0.2.10"] "requirements": ["airtouch5py==0.2.11"]
} }

View File

@ -136,7 +136,6 @@ class AirzoneClimate(AirzoneZoneEntity, ClimateEntity):
_attr_name = None _attr_name = None
_speeds: dict[int, str] = {} _speeds: dict[int, str] = {}
_speeds_reverse: dict[str, int] = {} _speeds_reverse: dict[str, int] = {}
_enable_turn_on_off_backwards_compatibility = False
def __init__( def __init__(
self, self,

View File

@ -11,5 +11,5 @@
"documentation": "https://www.home-assistant.io/integrations/airzone", "documentation": "https://www.home-assistant.io/integrations/airzone",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["aioairzone"], "loggers": ["aioairzone"],
"requirements": ["aioairzone==0.9.5"] "requirements": ["aioairzone==0.9.7"]
} }

View File

@ -177,7 +177,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
_attr_name = None _attr_name = None
_attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_temperature_unit = UnitOfTemperature.CELSIUS
_enable_turn_on_off_backwards_compatibility = False
def _init_attributes(self) -> None: def _init_attributes(self) -> None:
"""Init common climate device attributes.""" """Init common climate device attributes."""
@ -194,12 +193,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
) )
if (
self.get_airzone_value(AZD_SPEED) is not None
and self.get_airzone_value(AZD_SPEEDS) is not None
):
self._initialize_fan_speeds()
@callback @callback
def _handle_coordinator_update(self) -> None: def _handle_coordinator_update(self) -> None:
"""Update attributes when the coordinator updates.""" """Update attributes when the coordinator updates."""
@ -214,8 +207,6 @@ class AirzoneClimate(AirzoneEntity, ClimateEntity):
self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[ self._attr_hvac_action = HVAC_ACTION_LIB_TO_HASS[
self.get_airzone_value(AZD_ACTION) self.get_airzone_value(AZD_ACTION)
] ]
if self.supported_features & ClimateEntityFeature.FAN_MODE:
self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
if self.get_airzone_value(AZD_POWER): if self.get_airzone_value(AZD_POWER):
self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[ self._attr_hvac_mode = HVAC_MODE_LIB_TO_HASS[
self.get_airzone_value(AZD_MODE) self.get_airzone_value(AZD_MODE)
@ -252,6 +243,22 @@ class AirzoneDeviceClimate(AirzoneClimate):
_speeds: dict[int, str] _speeds: dict[int, str]
_speeds_reverse: dict[str, int] _speeds_reverse: dict[str, int]
def _init_attributes(self) -> None:
"""Init common climate device attributes."""
super()._init_attributes()
if (
self.get_airzone_value(AZD_SPEED) is not None
and self.get_airzone_value(AZD_SPEEDS) is not None
):
self._initialize_fan_speeds()
@callback
def _async_update_attrs(self) -> None:
"""Update climate attributes."""
super()._async_update_attrs()
if self.supported_features & ClimateEntityFeature.FAN_MODE:
self._attr_fan_mode = self._speeds.get(self.get_airzone_value(AZD_SPEED))
def _initialize_fan_speeds(self) -> None: def _initialize_fan_speeds(self) -> None:
"""Initialize fan speeds.""" """Initialize fan speeds."""
azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS) azd_speeds: dict[int, int] = self.get_airzone_value(AZD_SPEEDS)

View File

@ -4,9 +4,8 @@ from __future__ import annotations
import asyncio import asyncio
from datetime import timedelta from datetime import timedelta
from functools import partial
import logging import logging
from typing import Any, Final, final from typing import TYPE_CHECKING, Any, Final, final
from propcache import cached_property from propcache import cached_property
import voluptuous as vol import voluptuous as vol
@ -27,26 +26,14 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError from homeassistant.exceptions import ServiceValidationError
import homeassistant.helpers.config_validation as cv import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.deprecation import (
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity_platform import EntityPlatform from homeassistant.helpers.entity_platform import EntityPlatform
from homeassistant.helpers.frame import ReportBehavior, report_usage
from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey from homeassistant.util.hass_dict import HassKey
from .const import ( # noqa: F401 from .const import (
_DEPRECATED_FORMAT_NUMBER,
_DEPRECATED_FORMAT_TEXT,
_DEPRECATED_SUPPORT_ALARM_ARM_AWAY,
_DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
_DEPRECATED_SUPPORT_ALARM_ARM_HOME,
_DEPRECATED_SUPPORT_ALARM_ARM_NIGHT,
_DEPRECATED_SUPPORT_ALARM_ARM_VACATION,
_DEPRECATED_SUPPORT_ALARM_TRIGGER,
ATTR_CHANGED_BY, ATTR_CHANGED_BY,
ATTR_CODE_ARM_REQUIRED, ATTR_CODE_ARM_REQUIRED,
DOMAIN, DOMAIN,
@ -163,7 +150,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
_alarm_control_panel_option_default_code: str | None = None _alarm_control_panel_option_default_code: str | None = None
__alarm_legacy_state: bool = False __alarm_legacy_state: bool = False
__alarm_legacy_state_reported: bool = False
def __init_subclass__(cls, **kwargs: Any) -> None: def __init_subclass__(cls, **kwargs: Any) -> None:
"""Post initialisation processing.""" """Post initialisation processing."""
@ -173,17 +159,15 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
# setting the state directly. # setting the state directly.
cls.__alarm_legacy_state = True cls.__alarm_legacy_state = True
def __setattr__(self, __name: str, __value: Any) -> None: def __setattr__(self, name: str, value: Any, /) -> None:
"""Set attribute. """Set attribute.
Deprecation warning if setting '_attr_state' directly Deprecation warning if setting '_attr_state' directly
unless already reported. unless already reported.
""" """
if __name == "_attr_state": if name == "_attr_state":
if self.__alarm_legacy_state_reported is not True: self._report_deprecated_alarm_state_handling()
self._report_deprecated_alarm_state_handling() return super().__setattr__(name, value)
self.__alarm_legacy_state_reported = True
return super().__setattr__(__name, __value)
@callback @callback
def add_to_platform_start( def add_to_platform_start(
@ -194,7 +178,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
) -> None: ) -> None:
"""Start adding an entity to a platform.""" """Start adding an entity to a platform."""
super().add_to_platform_start(hass, platform, parallel_updates) super().add_to_platform_start(hass, platform, parallel_updates)
if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported: if self.__alarm_legacy_state:
self._report_deprecated_alarm_state_handling() self._report_deprecated_alarm_state_handling()
@callback @callback
@ -203,27 +187,30 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
Integrations should implement alarm_state instead of using state directly. Integrations should implement alarm_state instead of using state directly.
""" """
self.__alarm_legacy_state_reported = True report_usage(
if "custom_components" in type(self).__module__: "is setting state directly."
# Do not report on core integrations as they have been fixed. f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
report_issue = "report it to the custom integration author." " property and return its state using the AlarmControlPanelState enum",
_LOGGER.warning( core_integration_behavior=ReportBehavior.ERROR,
"Entity %s (%s) is setting state directly" custom_integration_behavior=ReportBehavior.LOG,
" which will stop working in HA Core 2025.11." breaks_in_ha_version="2025.11",
" Entities should implement the 'alarm_state' property and" integration_domain=self.platform.platform_name if self.platform else None,
" return its state using the AlarmControlPanelState enum, please %s", exclude_integrations={DOMAIN},
self.entity_id, )
type(self),
report_issue,
)
@final @final
@property @property
def state(self) -> str | None: def state(self) -> str | None:
"""Return the current state.""" """Return the current state."""
if (alarm_state := self.alarm_state) is None: if (alarm_state := self.alarm_state) is not None:
return None return alarm_state
return alarm_state if self._attr_state is not None:
# Backwards compatibility for integrations that set state directly
# Should be removed in 2025.11
if TYPE_CHECKING:
assert isinstance(self._attr_state, str)
return self._attr_state
return None
@cached_property @cached_property
def alarm_state(self) -> AlarmControlPanelState | None: def alarm_state(self) -> AlarmControlPanelState | None:
@ -269,7 +256,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
"""Check if arm code is required, raise if no code is given.""" """Check if arm code is required, raise if no code is given."""
if not (_code := self.code_or_default_code(code)) and self.code_arm_required: if not (_code := self.code_or_default_code(code)) and self.code_arm_required:
raise ServiceValidationError( raise ServiceValidationError(
f"Arming requires a code but none was given for {self.entity_id}",
translation_domain=DOMAIN, translation_domain=DOMAIN,
translation_key="code_arm_required", translation_key="code_arm_required",
translation_placeholders={ translation_placeholders={
@ -369,12 +355,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
@cached_property @cached_property
def supported_features(self) -> AlarmControlPanelEntityFeature: def supported_features(self) -> AlarmControlPanelEntityFeature:
"""Return the list of supported features.""" """Return the list of supported features."""
features = self._attr_supported_features return self._attr_supported_features
if type(features) is int: # noqa: E721
new_features = AlarmControlPanelEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features
@final @final
@property @property
@ -412,13 +393,3 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
self._alarm_control_panel_option_default_code = default_code self._alarm_control_panel_option_default_code = default_code
return return
self._alarm_control_panel_option_default_code = None self._alarm_control_panel_option_default_code = None
# As we import constants of the const module here, we need to add the following
# functions to check for deprecated constants again
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
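
For custom integrations hit by the report_usage message above, a minimal sketch (assuming a recent Home Assistant core; the class and its internal flag are hypothetical) of the migration it asks for, returning the state through the alarm_state property instead of writing _attr_state:

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelState,
)

class ExampleAlarmPanel(AlarmControlPanelEntity):
    """Hypothetical panel that reports its state via the AlarmControlPanelState enum."""

    _armed_away = False  # stand-in for real device state

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
        # Returning the enum here replaces any direct writes to _attr_state.
        if self._armed_away:
            return AlarmControlPanelState.ARMED_AWAY
        return AlarmControlPanelState.DISARMED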

View File

@ -1,16 +1,8 @@
"""Provides the constants needed for component.""" """Provides the constants needed for component."""
from enum import IntFlag, StrEnum from enum import IntFlag, StrEnum
from functools import partial
from typing import Final from typing import Final
from homeassistant.helpers.deprecation import (
DeprecatedConstantEnum,
all_with_deprecated_constants,
check_if_deprecated_constant,
dir_with_deprecated_constants,
)
DOMAIN: Final = "alarm_control_panel" DOMAIN: Final = "alarm_control_panel"
ATTR_CHANGED_BY: Final = "changed_by" ATTR_CHANGED_BY: Final = "changed_by"
@ -39,12 +31,6 @@ class CodeFormat(StrEnum):
NUMBER = "number" NUMBER = "number"
# These constants are deprecated as of Home Assistant 2022.5, can be removed in 2025.1
# Please use the CodeFormat enum instead.
_DEPRECATED_FORMAT_TEXT: Final = DeprecatedConstantEnum(CodeFormat.TEXT, "2025.1")
_DEPRECATED_FORMAT_NUMBER: Final = DeprecatedConstantEnum(CodeFormat.NUMBER, "2025.1")
class AlarmControlPanelEntityFeature(IntFlag): class AlarmControlPanelEntityFeature(IntFlag):
"""Supported features of the alarm control panel entity.""" """Supported features of the alarm control panel entity."""
@ -56,27 +42,6 @@ class AlarmControlPanelEntityFeature(IntFlag):
ARM_VACATION = 32 ARM_VACATION = 32
# These constants are deprecated as of Home Assistant 2022.5
# Please use the AlarmControlPanelEntityFeature enum instead.
_DEPRECATED_SUPPORT_ALARM_ARM_HOME: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.ARM_HOME, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_AWAY: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.ARM_AWAY, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_NIGHT: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.ARM_NIGHT, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_TRIGGER: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.TRIGGER, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, "2025.1"
)
_DEPRECATED_SUPPORT_ALARM_ARM_VACATION: Final = DeprecatedConstantEnum(
AlarmControlPanelEntityFeature.ARM_VACATION, "2025.1"
)
CONDITION_TRIGGERED: Final = "is_triggered" CONDITION_TRIGGERED: Final = "is_triggered"
CONDITION_DISARMED: Final = "is_disarmed" CONDITION_DISARMED: Final = "is_disarmed"
CONDITION_ARMED_HOME: Final = "is_armed_home" CONDITION_ARMED_HOME: Final = "is_armed_home"
@ -84,10 +49,3 @@ CONDITION_ARMED_AWAY: Final = "is_armed_away"
CONDITION_ARMED_NIGHT: Final = "is_armed_night" CONDITION_ARMED_NIGHT: Final = "is_armed_night"
CONDITION_ARMED_VACATION: Final = "is_armed_vacation" CONDITION_ARMED_VACATION: Final = "is_armed_vacation"
CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass" CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass"
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

View File

@ -130,7 +130,7 @@
}, },
"alarm_trigger": { "alarm_trigger": {
"name": "Trigger", "name": "Trigger",
"description": "Enables an external alarm trigger.", "description": "Trigger the alarm manually.",
"fields": { "fields": {
"code": { "code": {
"name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]", "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]",
@ -138,5 +138,10 @@
} }
} }
} }
},
"exceptions": {
"code_arm_required": {
"message": "Arming requires a code but none was given for {entity_id}."
}
} }
} }

View File

@ -317,6 +317,7 @@ class Alexa(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -403,6 +404,7 @@ class AlexaPowerController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -436,7 +438,7 @@ class AlexaPowerController(AlexaCapability):
elif self.entity.domain == remote.DOMAIN: elif self.entity.domain == remote.DOMAIN:
is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN) is_on = self.entity.state not in (STATE_OFF, STATE_UNKNOWN)
elif self.entity.domain == vacuum.DOMAIN: elif self.entity.domain == vacuum.DOMAIN:
is_on = self.entity.state == vacuum.STATE_CLEANING is_on = self.entity.state == vacuum.VacuumActivity.CLEANING
elif self.entity.domain == timer.DOMAIN: elif self.entity.domain == timer.DOMAIN:
is_on = self.entity.state != STATE_IDLE is_on = self.entity.state != STATE_IDLE
elif self.entity.domain == water_heater.DOMAIN: elif self.entity.domain == water_heater.DOMAIN:
@ -469,6 +471,7 @@ class AlexaLockController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -523,6 +526,7 @@ class AlexaSceneController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -562,6 +566,7 @@ class AlexaBrightnessController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -611,6 +616,7 @@ class AlexaColorController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -669,6 +675,7 @@ class AlexaColorTemperatureController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -715,6 +722,7 @@ class AlexaSpeaker(AlexaCapability):
"fr-FR", # Not documented as of 2021-12-04, see PR #60489 "fr-FR", # Not documented as of 2021-12-04, see PR #60489
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
} }
def name(self) -> str: def name(self) -> str:
@ -772,6 +780,7 @@ class AlexaStepSpeaker(AlexaCapability):
"es-ES", "es-ES",
"fr-FR", # Not documented as of 2021-12-04, see PR #60489 "fr-FR", # Not documented as of 2021-12-04, see PR #60489
"it-IT", "it-IT",
"nl-NL",
} }
def name(self) -> str: def name(self) -> str:
@ -801,6 +810,7 @@ class AlexaPlaybackController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -816,13 +826,19 @@ class AlexaPlaybackController(AlexaCapability):
""" """
supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
operations = { operations: dict[
media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next", cover.CoverEntityFeature | media_player.MediaPlayerEntityFeature, str
media_player.MediaPlayerEntityFeature.PAUSE: "Pause", ]
media_player.MediaPlayerEntityFeature.PLAY: "Play", if self.entity.domain == cover.DOMAIN:
media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous", operations = {cover.CoverEntityFeature.STOP: "Stop"}
media_player.MediaPlayerEntityFeature.STOP: "Stop", else:
} operations = {
media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next",
media_player.MediaPlayerEntityFeature.PAUSE: "Pause",
media_player.MediaPlayerEntityFeature.PLAY: "Play",
media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous",
media_player.MediaPlayerEntityFeature.STOP: "Stop",
}
return [ return [
value value
@ -853,6 +869,7 @@ class AlexaInputController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -1098,6 +1115,7 @@ class AlexaThermostatController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -1239,6 +1257,7 @@ class AlexaPowerLevelController(AlexaCapability):
"fr-CA", "fr-CA",
"fr-FR", "fr-FR",
"it-IT", "it-IT",
"nl-NL",
"ja-JP", "ja-JP",
} }
@ -1717,6 +1736,7 @@ class AlexaRangeController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -2060,6 +2080,7 @@ class AlexaToggleController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -2206,6 +2227,7 @@ class AlexaPlaybackStateReporter(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -2261,6 +2283,7 @@ class AlexaSeekController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -2354,6 +2377,7 @@ class AlexaEqualizerController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }
@ -2464,6 +2488,7 @@ class AlexaCameraStreamController(AlexaCapability):
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
} }

View File

@ -59,6 +59,7 @@ CONF_SUPPORTED_LOCALES = (
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
) )

View File

@ -559,6 +559,10 @@ class CoverCapabilities(AlexaEntity):
) )
if supported & cover.CoverEntityFeature.SET_TILT_POSITION: if supported & cover.CoverEntityFeature.SET_TILT_POSITION:
yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt") yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt")
if supported & (
cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.STOP_TILT
):
yield AlexaPlaybackController(self.entity, instance=f"{cover.DOMAIN}.stop")
yield AlexaEndpointHealth(self.hass, self.entity) yield AlexaEndpointHealth(self.hass, self.entity)
yield Alexa(self.entity) yield Alexa(self.entity)

View File

@ -2,6 +2,7 @@
from __future__ import annotations from __future__ import annotations
import asyncio
from collections.abc import Callable, Coroutine from collections.abc import Callable, Coroutine
import logging import logging
import math import math
@ -358,7 +359,7 @@ async def async_api_set_color_temperature(
await hass.services.async_call( await hass.services.async_call(
entity.domain, entity.domain,
SERVICE_TURN_ON, SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity.entity_id, light.ATTR_KELVIN: kelvin}, {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: kelvin},
blocking=False, blocking=False,
context=context, context=context,
) )
@ -375,14 +376,14 @@ async def async_api_decrease_color_temp(
) -> AlexaResponse: ) -> AlexaResponse:
"""Process a decrease color temperature request.""" """Process a decrease color temperature request."""
entity = directive.entity entity = directive.entity
current = int(entity.attributes[light.ATTR_COLOR_TEMP]) current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
max_mireds = int(entity.attributes[light.ATTR_MAX_MIREDS]) min_kelvin = int(entity.attributes[light.ATTR_MIN_COLOR_TEMP_KELVIN])
value = min(max_mireds, current + 50) value = max(min_kelvin, current - 500)
await hass.services.async_call( await hass.services.async_call(
entity.domain, entity.domain,
SERVICE_TURN_ON, SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
blocking=False, blocking=False,
context=context, context=context,
) )
@ -399,14 +400,14 @@ async def async_api_increase_color_temp(
) -> AlexaResponse: ) -> AlexaResponse:
"""Process an increase color temperature request.""" """Process an increase color temperature request."""
entity = directive.entity entity = directive.entity
current = int(entity.attributes[light.ATTR_COLOR_TEMP]) current = int(entity.attributes[light.ATTR_COLOR_TEMP_KELVIN])
min_mireds = int(entity.attributes[light.ATTR_MIN_MIREDS]) max_kelvin = int(entity.attributes[light.ATTR_MAX_COLOR_TEMP_KELVIN])
value = max(min_mireds, current - 50) value = min(max_kelvin, current + 500)
await hass.services.async_call( await hass.services.async_call(
entity.domain, entity.domain,
SERVICE_TURN_ON, SERVICE_TURN_ON,
{ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP: value}, {ATTR_ENTITY_ID: entity.entity_id, light.ATTR_COLOR_TEMP_KELVIN: value},
blocking=False, blocking=False,
context=context, context=context,
) )
@ -526,6 +527,7 @@ async def async_api_unlock(
"hi-IN", "hi-IN",
"it-IT", "it-IT",
"ja-JP", "ja-JP",
"nl-NL",
"pt-BR", "pt-BR",
}: }:
msg = ( msg = (
@ -764,9 +766,25 @@ async def async_api_stop(
entity = directive.entity entity = directive.entity
data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id} data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}
await hass.services.async_call( if entity.domain == cover.DOMAIN:
entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context supported: int = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
) feature_services: dict[int, str] = {
cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER,
cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT,
}
await asyncio.gather(
*(
hass.services.async_call(
entity.domain, service, data, blocking=False, context=context
)
for feature, service in feature_services.items()
if feature & supported
)
)
else:
await hass.services.async_call(
entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context
)
return directive.response() return directive.response()
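
The color-temperature handlers above now step by 500 K on ATTR_COLOR_TEMP_KELVIN instead of 50 mireds on the deprecated mired attributes. A quick illustration (not from this change) of the reciprocal mired/kelvin relationship that makes those step sizes roughly comparable:

def kelvin_to_mireds(kelvin: float) -> float:
    return 1_000_000 / kelvin

def mireds_to_kelvin(mireds: float) -> float:
    return 1_000_000 / mireds

# Around a typical 3000 K white, the old +50 mired step is close to the new -500 K step:
print(mireds_to_kelvin(kelvin_to_mireds(3000) + 50))  # ~2609 K, i.e. about 390 K lower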

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/alpha_vantage", "documentation": "https://www.home-assistant.io/integrations/alpha_vantage",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["alpha_vantage"], "loggers": ["alpha_vantage"],
"quality_scale": "legacy",
"requirements": ["alpha-vantage==2.3.1"] "requirements": ["alpha-vantage==2.3.1"]
} }

View File

@ -5,5 +5,6 @@
"documentation": "https://www.home-assistant.io/integrations/amazon_polly", "documentation": "https://www.home-assistant.io/integrations/amazon_polly",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["boto3", "botocore", "s3transfer"], "loggers": ["boto3", "botocore", "s3transfer"],
"quality_scale": "legacy",
"requirements": ["boto3==1.34.131"] "requirements": ["boto3==1.34.131"]
} }

View File

@ -1,7 +1,6 @@
"""Support for Amber Electric.""" """Support for Amber Electric."""
from amberelectric import Configuration import amberelectric
from amberelectric.api import amber_api
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN from homeassistant.const import CONF_API_TOKEN
@ -15,8 +14,9 @@ type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator]
async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool:
"""Set up Amber Electric from a config entry.""" """Set up Amber Electric from a config entry."""
configuration = Configuration(access_token=entry.data[CONF_API_TOKEN]) configuration = amberelectric.Configuration(access_token=entry.data[CONF_API_TOKEN])
api_instance = amber_api.AmberApi.create(configuration) api_client = amberelectric.ApiClient(configuration)
api_instance = amberelectric.AmberApi(api_client)
site_id = entry.data[CONF_SITE_ID] site_id = entry.data[CONF_SITE_ID]
coordinator = AmberUpdateCoordinator(hass, api_instance, site_id) coordinator = AmberUpdateCoordinator(hass, api_instance, site_id)

View File

@ -3,8 +3,8 @@
from __future__ import annotations from __future__ import annotations
import amberelectric import amberelectric
from amberelectric.api import amber_api from amberelectric.models.site import Site
from amberelectric.model.site import Site, SiteStatus from amberelectric.models.site_status import SiteStatus
import voluptuous as vol import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@ -23,11 +23,15 @@ API_URL = "https://app.amber.com.au/developers"
def generate_site_selector_name(site: Site) -> str: def generate_site_selector_name(site: Site) -> str:
"""Generate the name to show in the site drop down in the configuration flow.""" """Generate the name to show in the site drop down in the configuration flow."""
# For some reason the generated API key returns this as any, not a string. Thanks pydantic
nmi = str(site.nmi)
if site.status == SiteStatus.CLOSED: if site.status == SiteStatus.CLOSED:
return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")" # type: ignore[no-any-return] if site.closed_on is None:
return f"{nmi} (Closed)"
return f"{nmi} (Closed: {site.closed_on.isoformat()})"
if site.status == SiteStatus.PENDING: if site.status == SiteStatus.PENDING:
return site.nmi + " (Pending)" # type: ignore[no-any-return] return f"{nmi} (Pending)"
return site.nmi # type: ignore[no-any-return] return nmi
def filter_sites(sites: list[Site]) -> list[Site]: def filter_sites(sites: list[Site]) -> list[Site]:
@ -35,7 +39,7 @@ def filter_sites(sites: list[Site]) -> list[Site]:
filtered: list[Site] = [] filtered: list[Site] = []
filtered_nmi: set[str] = set() filtered_nmi: set[str] = set()
for site in sorted(sites, key=lambda site: site.status.value): for site in sorted(sites, key=lambda site: site.status):
if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi: if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi:
filtered.append(site) filtered.append(site)
filtered_nmi.add(site.nmi) filtered_nmi.add(site.nmi)
@ -56,7 +60,8 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN):
def _fetch_sites(self, token: str) -> list[Site] | None: def _fetch_sites(self, token: str) -> list[Site] | None:
configuration = amberelectric.Configuration(access_token=token) configuration = amberelectric.Configuration(access_token=token)
api: amber_api.AmberApi = amber_api.AmberApi.create(configuration) api_client = amberelectric.ApiClient(configuration)
api = amberelectric.AmberApi(api_client)
try: try:
sites: list[Site] = filter_sites(api.get_sites()) sites: list[Site] = filter_sites(api.get_sites())

View File

@@ -5,13 +5,13 @@ from __future__ import annotations
 from datetime import timedelta
 from typing import Any

-from amberelectric import ApiException
-from amberelectric.api import amber_api
-from amberelectric.model.actual_interval import ActualInterval
-from amberelectric.model.channel import ChannelType
-from amberelectric.model.current_interval import CurrentInterval
-from amberelectric.model.forecast_interval import ForecastInterval
-from amberelectric.model.interval import Descriptor
+import amberelectric
+from amberelectric.models.actual_interval import ActualInterval
+from amberelectric.models.channel import ChannelType
+from amberelectric.models.current_interval import CurrentInterval
+from amberelectric.models.forecast_interval import ForecastInterval
+from amberelectric.models.price_descriptor import PriceDescriptor
+from amberelectric.rest import ApiException

 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -31,22 +31,22 @@ def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
 def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
     """Return true if the supplied interval is on the general channel."""
-    return interval.channel_type == ChannelType.GENERAL  # type: ignore[no-any-return]
+    return interval.channel_type == ChannelType.GENERAL


 def is_controlled_load(
     interval: ActualInterval | CurrentInterval | ForecastInterval,
 ) -> bool:
     """Return true if the supplied interval is on the controlled load channel."""
-    return interval.channel_type == ChannelType.CONTROLLED_LOAD  # type: ignore[no-any-return]
+    return interval.channel_type == ChannelType.CONTROLLEDLOAD


 def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
     """Return true if the supplied interval is on the feed in channel."""
-    return interval.channel_type == ChannelType.FEED_IN  # type: ignore[no-any-return]
+    return interval.channel_type == ChannelType.FEEDIN


-def normalize_descriptor(descriptor: Descriptor) -> str | None:
+def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None:
     """Return the snake case versions of descriptor names. Returns None if the name is not recognized."""
     if descriptor is None:
         return None
@@ -71,7 +71,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
     """AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read."""

     def __init__(
-        self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str
+        self, hass: HomeAssistant, api: amberelectric.AmberApi, site_id: str
     ) -> None:
         """Initialise the data service."""
         super().__init__(
@@ -93,12 +93,13 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
            "grid": {},
        }
        try:
-           data = self._api.get_current_price(self.site_id, next=48)
+           data = self._api.get_current_prices(self.site_id, next=48)
+           intervals = [interval.actual_instance for interval in data]
        except ApiException as api_exception:
            raise UpdateFailed("Missing price data, skipping update") from api_exception
-       current = [interval for interval in data if is_current(interval)]
-       forecasts = [interval for interval in data if is_forecast(interval)]
+       current = [interval for interval in intervals if is_current(interval)]
+       forecasts = [interval for interval in intervals if is_forecast(interval)]
        general = [interval for interval in current if is_general(interval)]
        if len(general) == 0:
@@ -137,7 +138,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator):
                interval for interval in forecasts if is_feed_in(interval)
            ]
-       LOGGER.debug("Fetched new Amber data: %s", data)
+       LOGGER.debug("Fetched new Amber data: %s", intervals)
        return result

    async def _async_update_data(self) -> dict[str, Any]:
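In 2.x, get_current_prices returns wrapper objects whose actual_instance attribute holds the concrete interval model, which is why the coordinator now unwraps the response before filtering. A standalone sketch of that fetch-and-filter step, with placeholder credentials and with is_current/is_forecast approximated as isinstance checks against the interval models imported above:

# Sketch of the coordinator's new fetch step, outside Home Assistant.
# "TOKEN" and "SITE_ID" are placeholders.
import amberelectric
from amberelectric.models.current_interval import CurrentInterval
from amberelectric.models.forecast_interval import ForecastInterval
from amberelectric.rest import ApiException

configuration = amberelectric.Configuration(access_token="TOKEN")
api = amberelectric.AmberApi(amberelectric.ApiClient(configuration))

try:
    data = api.get_current_prices("SITE_ID", next=48)
except ApiException as err:
    raise RuntimeError("Missing price data, skipping update") from err

# Each element wraps one of ActualInterval/CurrentInterval/ForecastInterval;
# .actual_instance unwraps the concrete model, as the coordinator now does.
intervals = [interval.actual_instance for interval in data]
current = [i for i in intervals if isinstance(i, CurrentInterval)]
forecasts = [i for i in intervals if isinstance(i, ForecastInterval)]
print(f"{len(current)} current and {len(forecasts)} forecast intervals")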

View File

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/amberelectric",
   "iot_class": "cloud_polling",
   "loggers": ["amberelectric"],
-  "requirements": ["amberelectric==1.1.1"]
+  "requirements": ["amberelectric==2.0.12"]
 }

View File

@@ -8,9 +8,9 @@ from __future__ import annotations

 from typing import Any

-from amberelectric.model.channel import ChannelType
-from amberelectric.model.current_interval import CurrentInterval
-from amberelectric.model.forecast_interval import ForecastInterval
+from amberelectric.models.channel import ChannelType
+from amberelectric.models.current_interval import CurrentInterval
+from amberelectric.models.forecast_interval import ForecastInterval

 from homeassistant.components.sensor import (
     SensorEntity,
@@ -52,7 +52,7 @@ class AmberSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity):
         self,
         coordinator: AmberUpdateCoordinator,
         description: SensorEntityDescription,
-        channel_type: ChannelType,
+        channel_type: str,
     ) -> None:
         """Initialize the Sensor."""
         super().__init__(coordinator)
@@ -73,7 +73,7 @@ class AmberPriceSensor(AmberSensor):
         """Return the current price in $/kWh."""
         interval = self.coordinator.data[self.entity_description.key][self.channel_type]
-        if interval.channel_type == ChannelType.FEED_IN:
+        if interval.channel_type == ChannelType.FEEDIN:
             return format_cents_to_dollars(interval.per_kwh) * -1
         return format_cents_to_dollars(interval.per_kwh)
@@ -87,9 +87,9 @@ class AmberPriceSensor(AmberSensor):
             return data
         data["duration"] = interval.duration
-        data["date"] = interval.date.isoformat()
+        data["date"] = interval.var_date.isoformat()
         data["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
-        if interval.channel_type == ChannelType.FEED_IN:
+        if interval.channel_type == ChannelType.FEEDIN:
             data["per_kwh"] = data["per_kwh"] * -1
         data["nem_date"] = interval.nem_time.isoformat()
         data["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
@@ -120,7 +120,7 @@ class AmberForecastSensor(AmberSensor):
             return None
         interval = intervals[0]
-        if interval.channel_type == ChannelType.FEED_IN:
+        if interval.channel_type == ChannelType.FEEDIN:
             return format_cents_to_dollars(interval.per_kwh) * -1
         return format_cents_to_dollars(interval.per_kwh)
@@ -142,10 +142,10 @@ class AmberForecastSensor(AmberSensor):
         for interval in intervals:
             datum = {}
             datum["duration"] = interval.duration
-            datum["date"] = interval.date.isoformat()
+            datum["date"] = interval.var_date.isoformat()
             datum["nem_date"] = interval.nem_time.isoformat()
             datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh)
-            if interval.channel_type == ChannelType.FEED_IN:
+            if interval.channel_type == ChannelType.FEEDIN:
                 datum["per_kwh"] = datum["per_kwh"] * -1
             datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh)
             datum["start_time"] = interval.start_time.isoformat()

View File

@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/amcrest",
   "iot_class": "local_polling",
   "loggers": ["amcrest"],
+  "quality_scale": "legacy",
   "requirements": ["amcrest==1.9.8"]
 }

View File

@@ -41,7 +41,7 @@
         }
       },
       "enable_motion_recording": {
-        "name": "Enables motion recording",
+        "name": "Enable motion recording",
         "description": "Enables recording a clip to camera storage when motion is detected.",
         "fields": {
           "entity_id": {
@@ -51,8 +51,8 @@
         }
       },
       "disable_motion_recording": {
-        "name": "Disables motion recording",
-        "description": "Disable recording a clip to camera storage when motion is detected.",
+        "name": "Disable motion recording",
+        "description": "Disables recording a clip to camera storage when motion is detected.",
         "fields": {
           "entity_id": {
             "name": "[%key:component::amcrest::services::enable_recording::fields::entity_id::name%]",

View File

@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/ampio",
   "iot_class": "cloud_polling",
   "loggers": ["asmog"],
+  "quality_scale": "legacy",
   "requirements": ["asmog==0.0.6"]
 }

View File

@@ -11,12 +11,7 @@ from python_homeassistant_analytics import (
 from python_homeassistant_analytics.models import IntegrationType
 import voluptuous as vol

-from homeassistant.config_entries import (
-    ConfigEntry,
-    ConfigFlow,
-    ConfigFlowResult,
-    OptionsFlow,
-)
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
 from homeassistant.core import callback
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.selector import (
@@ -25,6 +20,7 @@ from homeassistant.helpers.selector import (
     SelectSelectorConfig,
 )

+from . import AnalyticsInsightsConfigEntry
 from .const import (
     CONF_TRACKED_ADDONS,
     CONF_TRACKED_CUSTOM_INTEGRATIONS,
@@ -46,7 +42,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
     @staticmethod
     @callback
     def async_get_options_flow(
-        config_entry: ConfigEntry,
+        config_entry: AnalyticsInsightsConfigEntry,
     ) -> HomeassistantAnalyticsOptionsFlowHandler:
         """Get the options flow for this handler."""
         return HomeassistantAnalyticsOptionsFlowHandler()
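The options-flow signature now uses the integration's typed config entry imported from the package itself, so the generic ConfigEntry import can go. A hedged sketch of the typed-entry pattern implied by that import; the real alias lives in the integration's __init__.py, and AnalyticsInsightsData here is only a placeholder for whatever runtime data it actually carries:

# Hedged sketch of a typed config entry alias (illustrative field names).
from dataclasses import dataclass

from homeassistant.config_entries import ConfigEntry


@dataclass
class AnalyticsInsightsData:
    """Placeholder runtime data stored on the config entry."""

    coordinator: object


# The flow handler can then annotate config_entry with this alias instead of ConfigEntry.
type AnalyticsInsightsConfigEntry = ConfigEntry[AnalyticsInsightsData]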

View File

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.0"],
+  "requirements": ["python-homeassistant-analytics==0.8.1"],
   "single_config_entry": true
 }

View File

@@ -0,0 +1,100 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: todo
  docs-installation-instructions: todo
  docs-removal-instructions: todo
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done
  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not provide actions.
  config-entry-unloading: done
  docs-configuration-parameters: todo
  docs-installation-parameters: todo
  entity-unavailable:
    status: done
    comment: |
      The coordinator handles this.
  integration-owner: done
  log-when-unavailable:
    status: done
    comment: |
      The coordinator handles this.
  parallel-updates: todo
  reauthentication-flow:
    status: exempt
    comment: |
      This integration does not require authentication.
  test-coverage: todo
  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and thus does not support discovery.
  discovery:
    status: exempt
    comment: |
      This integration is a cloud service and thus does not support discovery.
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: |
      This integration has a fixed single service.
  entity-category: done
  entity-device-class:
    status: exempt
    comment: |
      This integration does not have entities with device classes.
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations: done
  reconfiguration-flow:
    status: exempt
    comment: All the options of this integration are managed via the options flow
  repair-issues:
    status: exempt
    comment: |
      This integration doesn't have any cases where raising an issue is needed.
  stale-devices:
    status: exempt
    comment: |
      This integration has a fixed single service.
  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done

View File

@@ -110,7 +110,7 @@ def _setup_androidtv(
         adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
     else:
-        # Use "pure-python-adb" (communicate with ADB server)
+        # Communicate via ADB server
         signer = None
         adb_log = (
             "using ADB server at"
@@ -135,15 +135,16 @@ async def async_connect_androidtv(
     )

     aftv = await async_androidtv_setup(
-        config[CONF_HOST],
-        config[CONF_PORT],
-        adbkey,
-        config.get(CONF_ADB_SERVER_IP),
-        config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
-        state_detection_rules,
-        config[CONF_DEVICE_CLASS],
-        timeout,
-        signer,
+        host=config[CONF_HOST],
+        port=config[CONF_PORT],
+        adbkey=adbkey,
+        adb_server_ip=config.get(CONF_ADB_SERVER_IP),
+        adb_server_port=config.get(CONF_ADB_SERVER_PORT, DEFAULT_ADB_SERVER_PORT),
+        state_detection_rules=state_detection_rules,
+        device_class=config[CONF_DEVICE_CLASS],
+        auth_timeout_s=timeout,
+        signer=signer,
+        log_errors=False,
     )

     if not aftv.available:
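The setup call now passes every argument by keyword and the pure-python-adb backend is dropped. A standalone sketch of such a call, assuming the androidtv package's async setup helper (the integration imports it as async_androidtv_setup; the module path and the connection values below are placeholders, not taken from this change):

# Hedged sketch of a keyword-only call to the androidtv async setup helper.
import asyncio

from androidtv.setup_async import setup as async_androidtv_setup


async def connect() -> None:
    aftv = await async_androidtv_setup(
        host="192.168.1.10",        # placeholder address
        port=5555,
        adbkey="",                  # empty: use the pure-Python ADB implementation
        adb_server_ip=None,         # or point at an ADB server instead
        adb_server_port=5037,
        state_detection_rules=None,
        device_class="auto",
        auth_timeout_s=10.0,
        signer=None,
        log_errors=False,
    )
    print("available:", aftv.available)


asyncio.run(connect())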

View File

@@ -151,5 +151,5 @@ class AndroidTVEntity(Entity):
             # Using "adb_shell" (Python ADB implementation)
             self.exceptions = ADB_PYTHON_EXCEPTIONS
         else:
-            # Using "pure-python-adb" (communicate with ADB server)
+            # Communicate via ADB server
             self.exceptions = ADB_TCP_EXCEPTIONS

View File

@@ -6,10 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/androidtv",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "loggers": ["adb_shell", "androidtv", "pure_python_adb"],
-  "requirements": [
-    "adb-shell[async]==0.4.4",
-    "androidtv[async]==0.0.73",
-    "pure-python-adb[async]==0.3.0.dev0"
-  ]
+  "loggers": ["adb_shell", "androidtv"],
+  "requirements": ["adb-shell[async]==0.4.4", "androidtv[async]==0.0.75"]
 }

View File

@@ -21,7 +21,7 @@
       },
       "abort": {
         "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-        "invalid_unique_id": "Impossible to determine a valid unique id for the device"
+        "invalid_unique_id": "Impossible to determine a valid unique ID for the device"
       }
     },
     "options": {
@@ -38,17 +38,17 @@
         }
       },
       "apps": {
-        "title": "Configure Android Apps",
-        "description": "Configure application id {app_id}",
+        "title": "Configure Android apps",
+        "description": "Configure application ID {app_id}",
         "data": {
-          "app_name": "Application Name",
+          "app_name": "Application name",
           "app_id": "Application ID",
           "app_delete": "Check to delete this application"
         }
       },
       "rules": {
         "title": "Configure Android state detection rules",
-        "description": "Configure detection rule for application id {rule_id}",
+        "description": "Configure detection rule for application ID {rule_id}",
         "data": {
           "rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
           "rule_values": "List of state detection rules (see documentation)",

View File

@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
         # and one of them, which could end up being in discovery_info.host, is from a
         # different device. If any of the discovery_info.ip_addresses matches the
         # existing host, don't update the host.
-        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
+        if (
+            existing_config_entry
+            # Ignored entries don't have host
+            and CONF_HOST in existing_config_entry.data
+            and len(discovery_info.ip_addresses) > 1
+        ):
             existing_host = existing_config_entry.data[CONF_HOST]
             if existing_host != self.host:
                 if existing_host in [
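The added guard matters because an ignored config entry has no stored host, so the host comparison only makes sense when the key is present. A hedged standalone sketch of the same check, where existing_config_entry and discovery_info stand in for the matching config entry and the zeroconf discovery object:

# Sketch of the host-update guard added above.
from homeassistant.const import CONF_HOST


def should_check_existing_host(existing_config_entry, discovery_info) -> bool:
    """Return True only when the existing entry has a host worth comparing."""
    return bool(
        existing_config_entry is not None
        # Ignored entries don't have host
        and CONF_HOST in existing_config_entry.data
        and len(discovery_info.ip_addresses) > 1
    )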

View File

@@ -7,7 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["androidtvremote2"],
-  "quality_scale": "platinum",
   "requirements": ["androidtvremote2==0.1.2"],
   "zeroconf": ["_androidtvremote2._tcp.local."]
 }

View File

@@ -44,12 +44,12 @@
         }
       },
       "apps": {
-        "title": "Configure Android Apps",
-        "description": "Configure application id {app_id}",
+        "title": "Configure Android apps",
+        "description": "Configure application ID {app_id}",
         "data": {
-          "app_name": "Application Name",
+          "app_name": "Application name",
           "app_id": "Application ID",
-          "app_icon": "Application Icon",
+          "app_icon": "Application icon",
           "app_delete": "Check to delete this application"
         }
       }

View File

@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/anel_pwrctrl",
   "iot_class": "local_polling",
   "loggers": ["anel_pwrctrl"],
+  "quality_scale": "legacy",
   "requirements": ["anel-pwrctrl-homeassistant==0.0.1.dev2"]
 }

View File

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/aosmith",
   "iot_class": "cloud_polling",
-  "requirements": ["py-aosmith==1.0.10"]
+  "requirements": ["py-aosmith==1.0.12"]
 }

View File

@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/apache_kafka",
   "iot_class": "local_push",
   "loggers": ["aiokafka", "kafka_python"],
+  "quality_scale": "legacy",
   "requirements": ["aiokafka==0.10.0"]
 }

Some files were not shown because too many files have changed in this diff.