Merge branch 'dev' into mill

commit 93b0051ade
Daniel Hjelseth Høyer, 2025-03-21 06:24:03 +01:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
380 changed files with 17,326 additions and 3,612 deletions


@@ -69,7 +69,7 @@ jobs:
  run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
  - name: Upload translations
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: translations
  path: translations.tar.gz
@@ -175,7 +175,7 @@ jobs:
  sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
  - name: Download translations
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: translations
@@ -190,7 +190,7 @@ jobs:
  echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
  - name: Login to GitHub Container Registry
- uses: docker/login-action@v3.3.0
+ uses: docker/login-action@v3.4.0
  with:
  registry: ghcr.io
  username: ${{ github.repository_owner }}
@@ -256,7 +256,7 @@ jobs:
  fi
  - name: Login to GitHub Container Registry
- uses: docker/login-action@v3.3.0
+ uses: docker/login-action@v3.4.0
  with:
  registry: ghcr.io
  username: ${{ github.repository_owner }}
@@ -330,14 +330,14 @@ jobs:
  - name: Login to DockerHub
  if: matrix.registry == 'docker.io/homeassistant'
- uses: docker/login-action@v3.3.0
+ uses: docker/login-action@v3.4.0
  with:
  username: ${{ secrets.DOCKERHUB_USERNAME }}
  password: ${{ secrets.DOCKERHUB_TOKEN }}
  - name: Login to GitHub Container Registry
  if: matrix.registry == 'ghcr.io/home-assistant'
- uses: docker/login-action@v3.3.0
+ uses: docker/login-action@v3.4.0
  with:
  registry: ghcr.io
  username: ${{ github.repository_owner }}
@@ -462,7 +462,7 @@ jobs:
  python-version: ${{ env.DEFAULT_PYTHON }}
  - name: Download translations
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: translations
@@ -502,7 +502,7 @@ jobs:
  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
  - name: Login to GitHub Container Registry
- uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+ uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
  with:
  registry: ghcr.io
  username: ${{ github.repository_owner }}


@@ -37,7 +37,7 @@ on:
  type: boolean
  env:
- CACHE_VERSION: 11
+ CACHE_VERSION: 12
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 9
  HA_SHORT_VERSION: "2025.4"
@@ -255,7 +255,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache@v4.2.2
+ uses: actions/cache@v4.2.3
  with:
  path: venv
  key: >-
@@ -271,7 +271,7 @@ jobs:
  uv pip install "$(cat requirements_test.txt | grep pre-commit)"
  - name: Restore pre-commit environment from cache
  id: cache-precommit
- uses: actions/cache@v4.2.2
+ uses: actions/cache@v4.2.3
  with:
  path: ${{ env.PRE_COMMIT_CACHE }}
  lookup-only: true
@@ -301,7 +301,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -310,7 +310,7 @@ jobs:
  needs.info.outputs.pre-commit_cache_key }}
  - name: Restore pre-commit environment from cache
  id: cache-precommit
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: ${{ env.PRE_COMMIT_CACHE }}
  fail-on-cache-miss: true
@@ -341,7 +341,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -350,7 +350,7 @@ jobs:
  needs.info.outputs.pre-commit_cache_key }}
  - name: Restore pre-commit environment from cache
  id: cache-precommit
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: ${{ env.PRE_COMMIT_CACHE }}
  fail-on-cache-miss: true
@@ -381,7 +381,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -390,7 +390,7 @@ jobs:
  needs.info.outputs.pre-commit_cache_key }}
  - name: Restore pre-commit environment from cache
  id: cache-precommit
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: ${{ env.PRE_COMMIT_CACHE }}
  fail-on-cache-miss: true
@@ -497,7 +497,7 @@ jobs:
  env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache@v4.2.2
+ uses: actions/cache@v4.2.3
  with:
  path: venv
  key: >-
@@ -505,7 +505,7 @@ jobs:
  needs.info.outputs.python_cache_key }}
  - name: Restore uv wheel cache
  if: steps.cache-venv.outputs.cache-hit != 'true'
- uses: actions/cache@v4.2.2
+ uses: actions/cache@v4.2.3
  with:
  path: ${{ env.UV_CACHE_DIR }}
  key: >-
@@ -552,7 +552,7 @@ jobs:
  python --version
  uv pip freeze >> pip_freeze.txt
  - name: Upload pip_freeze artifact
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pip-freeze-${{ matrix.python-version }}
  path: pip_freeze.txt
@@ -593,7 +593,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -626,7 +626,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -683,7 +683,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ matrix.python-version }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -695,7 +695,7 @@ jobs:
  . venv/bin/activate
  python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
  - name: Upload licenses
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
  path: licenses-${{ matrix.python-version }}.json
@@ -726,7 +726,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -773,7 +773,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -825,7 +825,7 @@ jobs:
  env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
  - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -833,7 +833,7 @@ jobs:
  ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
  needs.info.outputs.python_cache_key }}
  - name: Restore mypy cache
- uses: actions/cache@v4.2.2
+ uses: actions/cache@v4.2.3
  with:
  path: .mypy_cache
  key: >-
@@ -895,7 +895,7 @@ jobs:
  check-latest: true
  - name: Restore base Python virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -907,7 +907,7 @@ jobs:
  . venv/bin/activate
  python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
  - name: Upload pytest_buckets
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pytest_buckets
  path: pytest_buckets.txt
@@ -955,7 +955,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ matrix.python-version }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -968,7 +968,7 @@ jobs:
  run: |
  echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
  - name: Download pytest_buckets
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: pytest_buckets
  - name: Compile English translations
@@ -1007,21 +1007,21 @@ jobs:
  2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
  - name: Upload pytest output
  if: success() || failure() && steps.pytest-full.conclusion == 'failure'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
  path: pytest-*.txt
  overwrite: true
  - name: Upload coverage artifact
  if: needs.info.outputs.skip_coverage != 'true'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
  path: coverage.xml
  overwrite: true
  - name: Upload test results artifact
  if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
  path: junit.xml
@@ -1080,7 +1080,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ matrix.python-version }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -1138,7 +1138,7 @@ jobs:
  2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
  - name: Upload pytest output
  if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.mariadb }}
@@ -1146,7 +1146,7 @@ jobs:
  overwrite: true
  - name: Upload coverage artifact
  if: needs.info.outputs.skip_coverage != 'true'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: coverage-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.mariadb }}
@@ -1154,7 +1154,7 @@ jobs:
  overwrite: true
  - name: Upload test results artifact
  if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: test-results-mariadb-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.mariadb }}
@@ -1214,7 +1214,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ matrix.python-version }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -1273,7 +1273,7 @@ jobs:
  2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
  - name: Upload pytest output
  if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.postgresql }}
@@ -1281,7 +1281,7 @@ jobs:
  overwrite: true
  - name: Upload coverage artifact
  if: needs.info.outputs.skip_coverage != 'true'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: coverage-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.postgresql }}
@@ -1289,7 +1289,7 @@ jobs:
  overwrite: true
  - name: Upload test results artifact
  if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: test-results-postgres-${{ matrix.python-version }}-${{
  steps.pytest-partial.outputs.postgresql }}
@@ -1312,7 +1312,7 @@ jobs:
  - name: Check out code from GitHub
  uses: actions/checkout@v4.2.2
  - name: Download all coverage artifacts
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  pattern: coverage-*
  - name: Upload coverage to Codecov
@@ -1365,7 +1365,7 @@ jobs:
  check-latest: true
  - name: Restore full Python ${{ matrix.python-version }} virtual environment
  id: cache-venv
- uses: actions/cache/restore@v4.2.2
+ uses: actions/cache/restore@v4.2.3
  with:
  path: venv
  fail-on-cache-miss: true
@@ -1420,21 +1420,21 @@ jobs:
  2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
  - name: Upload pytest output
  if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
  path: pytest-*.txt
  overwrite: true
  - name: Upload coverage artifact
  if: needs.info.outputs.skip_coverage != 'true'
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
  path: coverage.xml
  overwrite: true
  - name: Upload test results artifact
  if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
  path: junit.xml
@@ -1454,7 +1454,7 @@ jobs:
  - name: Check out code from GitHub
  uses: actions/checkout@v4.2.2
  - name: Download all coverage artifacts
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  pattern: coverage-*
  - name: Upload coverage to Codecov
@@ -1479,7 +1479,7 @@ jobs:
  timeout-minutes: 10
  steps:
  - name: Download all coverage artifacts
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  pattern: test-results-*
  - name: Upload test results to Codecov


@@ -24,11 +24,11 @@ jobs:
  uses: actions/checkout@v4.2.2
  - name: Initialize CodeQL
- uses: github/codeql-action/init@v3.28.11
+ uses: github/codeql-action/init@v3.28.12
  with:
  languages: python
  - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v3.28.11
+ uses: github/codeql-action/analyze@v3.28.12
  with:
  category: "/language:python"


@@ -91,7 +91,7 @@ jobs:
  ) > build_constraints.txt
  - name: Upload env_file
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: env_file
  path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
  overwrite: true
  - name: Upload build_constraints
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: build_constraints
  path: ./build_constraints.txt
  overwrite: true
  - name: Upload requirements_diff
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: requirements_diff
  path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
  python -m script.gen_requirements_all ci
  - name: Upload requirements_all_wheels
- uses: actions/upload-artifact@v4.6.1
+ uses: actions/upload-artifact@v4.6.2
  with:
  name: requirements_all_wheels
  path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
  uses: actions/checkout@v4.2.2
  - name: Download env_file
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: env_file
  - name: Download build_constraints
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: build_constraints
  - name: Download requirements_diff
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: requirements_diff
@@ -187,22 +187,22 @@ jobs:
  uses: actions/checkout@v4.2.2
  - name: Download env_file
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: env_file
  - name: Download build_constraints
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: build_constraints
  - name: Download requirements_diff
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: requirements_diff
  - name: Download requirements_all_wheels
- uses: actions/download-artifact@v4.1.9
+ uses: actions/download-artifact@v4.2.1
  with:
  name: requirements_all_wheels


@@ -412,6 +412,7 @@ homeassistant.components.recollect_waste.*
  homeassistant.components.recorder.*
  homeassistant.components.remember_the_milk.*
  homeassistant.components.remote.*
+ homeassistant.components.remote_calendar.*
  homeassistant.components.renault.*
  homeassistant.components.reolink.*
  homeassistant.components.repairs.*

CODEOWNERS (generated)

@@ -570,8 +570,8 @@ build.json @home-assistant/supervisor
  /tests/components/google_cloud/ @lufton @tronikos
  /homeassistant/components/google_drive/ @tronikos
  /tests/components/google_drive/ @tronikos
- /homeassistant/components/google_generative_ai_conversation/ @tronikos
- /tests/components/google_generative_ai_conversation/ @tronikos
+ /homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh
+ /tests/components/google_generative_ai_conversation/ @tronikos @ivanlh
  /homeassistant/components/google_mail/ @tkdrob
  /tests/components/google_mail/ @tkdrob
  /homeassistant/components/google_photos/ @allenporter
@@ -1252,6 +1252,8 @@ build.json @home-assistant/supervisor
  /tests/components/refoss/ @ashionky
  /homeassistant/components/remote/ @home-assistant/core
  /tests/components/remote/ @home-assistant/core
+ /homeassistant/components/remote_calendar/ @Thomas55555
+ /tests/components/remote_calendar/ @Thomas55555
  /homeassistant/components/renault/ @epenet
  /tests/components/renault/ @epenet
  /homeassistant/components/renson/ @jimmyd-be

Dockerfile (generated)

@@ -31,7 +31,7 @@ RUN \
  && go2rtc --version
  # Install uv
- RUN pip3 install uv==0.6.1
+ RUN pip3 install uv==0.6.8
  WORKDIR /usr/src


@@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator(
      async with timeout(10):
          result = await self.accuweather.async_get_current_conditions()
  except EXCEPTIONS as error:
-     raise UpdateFailed(error) from error
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="current_conditions_update_error",
+         translation_placeholders={"error": repr(error)},
+     ) from error
  _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)
@@ -121,7 +125,11 @@ class AccuWeatherDailyForecastDataUpdateCoordinator(
      language=self.hass.config.language
  )
  except EXCEPTIONS as error:
-     raise UpdateFailed(error) from error
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="forecast_update_error",
+         translation_placeholders={"error": repr(error)},
+     ) from error
  _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining)


@@ -229,6 +229,14 @@
  }
  }
  },
+ "exceptions": {
+   "current_conditions_update_error": {
+     "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}"
+   },
+   "forecast_update_error": {
+     "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}"
+   }
+ },
  "system_health": {
  "info": {
  "can_reach_server": "Reach AccuWeather server",


@@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
  try:
      await measurements.update()
  except (AirlyError, ClientConnectorError) as error:
-     raise UpdateFailed(error) from error
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="update_error",
+         translation_placeholders={
+             "entry": self.config_entry.title,
+             "error": repr(error),
+         },
+     ) from error
  _LOGGER.debug(
      "Requests remaining: %s/%s",
@@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i
  standards = measurements.current["standards"]
  if index["description"] == NO_AIRLY_SENSORS:
-     raise UpdateFailed("Can't retrieve data: no Airly sensors in this area")
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="no_station",
+         translation_placeholders={"entry": self.config_entry.title},
+     )
  for value in values:
      data[value["name"]] = value["value"]
  for standard in standards:


@@ -36,5 +36,13 @@
  "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]"
  }
  }
+ },
+ "exceptions": {
+   "update_error": {
+     "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}"
+   },
+   "no_station": {
+     "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area"
+   }
  }
  }


@@ -351,7 +351,7 @@ class BackupManager:
  # Latest backup event and backup event subscribers
  self.last_event: ManagerStateEvent = BlockedEvent()
- self.last_non_idle_event: ManagerStateEvent | None = None
+ self.last_action_event: ManagerStateEvent | None = None
  self._backup_event_subscriptions = hass.data[
      DATA_BACKUP
  ].backup_event_subscriptions
@@ -1337,7 +1337,7 @@ class BackupManager:
  LOGGER.debug("Backup state: %s -> %s", current_state, new_state)
  self.last_event = event
  if not isinstance(event, (BlockedEvent, IdleEvent)):
-     self.last_non_idle_event = event
+     self.last_action_event = event
  for subscription in self._backup_event_subscriptions:
      subscription(event)


@@ -55,7 +55,7 @@ async def handle_info(
  "backups": list(backups.values()),
  "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
  "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
- "last_non_idle_event": manager.last_non_idle_event,
+ "last_action_event": manager.last_action_event,
  "next_automatic_backup": manager.config.data.schedule.next_automatic_backup,
  "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional,
  "state": manager.state,


@@ -20,7 +20,7 @@
  "bluetooth-adapters==0.21.4",
  "bluetooth-auto-recovery==1.4.5",
  "bluetooth-data-tools==1.26.1",
- "dbus-fast==2.39.5",
- "habluetooth==3.27.0"
+ "dbus-fast==2.39.6",
+ "habluetooth==3.32.0"
  ]
  }


@@ -4,10 +4,14 @@ from __future__ import annotations
  from typing import Any
+ from homeassistant.components.diagnostics import async_redact_data
+ from homeassistant.const import CONF_EMAIL, CONF_NAME
  from homeassistant.core import HomeAssistant
  from .coordinator import BringConfigEntry
+ TO_REDACT = {CONF_NAME, CONF_EMAIL}
  async def async_get_config_entry_diagnostics(
      hass: HomeAssistant, config_entry: BringConfigEntry
@@ -15,7 +19,10 @@ async def async_get_config_entry_diagnostics(
  """Return diagnostics for a config entry."""
  return {
-     "data": {k: v.to_dict() for k, v in config_entry.runtime_data.data.items()},
+     "data": {
+         k: async_redact_data(v.to_dict(), TO_REDACT)
+         for k, v in config_entry.runtime_data.data.items()
+     },
      "lists": [lst.to_dict() for lst in config_entry.runtime_data.lists],
      "user_settings": config_entry.runtime_data.user_settings.to_dict(),
  }


@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["bring_api"],
  "quality_scale": "platinum",
- "requirements": ["bring-api==1.0.2"]
+ "requirements": ["bring-api==1.1.0"]
  }


@@ -9,6 +9,7 @@ from homeassistant.const import CONF_HOST, CONF_TYPE, Platform
  from homeassistant.core import HomeAssistant
  from homeassistant.exceptions import ConfigEntryNotReady
+ from .const import DOMAIN
  from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
  PLATFORMS = [Platform.SENSOR]
@@ -25,7 +26,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
      host, printer_type=printer_type, snmp_engine=snmp_engine
  )
  except (ConnectionError, SnmpError, TimeoutError) as error:
-     raise ConfigEntryNotReady from error
+     raise ConfigEntryNotReady(
+         translation_domain=DOMAIN,
+         translation_key="cannot_connect",
+         translation_placeholders={
+             "device": entry.title,
+             "error": repr(error),
+         },
+     ) from error
  coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
  await coordinator.async_config_entry_first_refresh()


@@ -26,6 +26,7 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]):
  ) -> None:
      """Initialize."""
      self.brother = brother
+     self.device_name = config_entry.title
      super().__init__(
          hass,
@@ -41,5 +42,12 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]):
      async with timeout(20):
          data = await self.brother.async_update()
  except (ConnectionError, SnmpError, UnsupportedModelError) as error:
-     raise UpdateFailed(error) from error
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="update_error",
+         translation_placeholders={
+             "device": self.device_name,
+             "error": repr(error),
+         },
+     ) from error
  return data


@@ -159,5 +159,13 @@
  "name": "Last restart"
  }
  }
+ },
+ "exceptions": {
+   "cannot_connect": {
+     "message": "An error occurred while connecting to the {device} printer: {error}"
+   },
+   "update_error": {
+     "message": "An error occurred while retrieving data from the {device} printer: {error}"
+   }
  }
  }


@@ -21,8 +21,8 @@
  "step": {
  "init": {
  "data": {
- "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras",
- "timeout": "Request Timeout (seconds)"
+ "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras",
+ "timeout": "Request timeout (seconds)"
  }
  }
  }


@@ -16,12 +16,21 @@ from homeassistant.config_entries import (
  from homeassistant.const import CONF_UUID
  from homeassistant.core import callback
  from homeassistant.helpers import config_validation as cv
+ from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
  from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
  from .const import CONF_IGNORE_CEC, CONF_KNOWN_HOSTS, DOMAIN
  IGNORE_CEC_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
- KNOWN_HOSTS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
+ KNOWN_HOSTS_SCHEMA = vol.Schema(
+     {
+         vol.Optional(
+             CONF_KNOWN_HOSTS,
+         ): SelectSelector(
+             SelectSelectorConfig(custom_value=True, options=[], multiple=True),
+         )
+     }
+ )
  WANTED_UUID_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string]))
@@ -30,12 +39,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
  VERSION = 1
- def __init__(self) -> None:
-     """Initialize flow."""
-     self._ignore_cec = set[str]()
-     self._known_hosts = set[str]()
-     self._wanted_uuid = set[str]()
  @staticmethod
  @callback
  def async_get_options_flow(
@@ -62,48 +65,31 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
      self, user_input: dict[str, Any] | None = None
  ) -> ConfigFlowResult:
      """Confirm the setup."""
-     errors = {}
-     data = {CONF_KNOWN_HOSTS: self._known_hosts}
      if user_input is not None:
-         bad_hosts = False
-         known_hosts = user_input[CONF_KNOWN_HOSTS]
-         known_hosts = [x.strip() for x in known_hosts.split(",") if x.strip()]
-         try:
-             known_hosts = KNOWN_HOSTS_SCHEMA(known_hosts)
-         except vol.Invalid:
-             errors["base"] = "invalid_known_hosts"
-             bad_hosts = True
-         else:
-             self._known_hosts = known_hosts
-             data = self._get_data()
-         if not bad_hosts:
-             return self.async_create_entry(title="Google Cast", data=data)
-     fields = {}
-     fields[vol.Optional(CONF_KNOWN_HOSTS, default="")] = str
-     return self.async_show_form(
-         step_id="config", data_schema=vol.Schema(fields), errors=errors
+         known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, []))
+         return self.async_create_entry(
+             title="Google Cast",
+             data=self._get_data(known_hosts=known_hosts),
      )
+     return self.async_show_form(step_id="config", data_schema=KNOWN_HOSTS_SCHEMA)
  async def async_step_confirm(
      self, user_input: dict[str, Any] | None = None
  ) -> ConfigFlowResult:
      """Confirm the setup."""
-     data = self._get_data()
      if user_input is not None or not onboarding.async_is_onboarded(self.hass):
-         return self.async_create_entry(title="Google Cast", data=data)
+         return self.async_create_entry(title="Google Cast", data=self._get_data())
      return self.async_show_form(step_id="confirm")
- def _get_data(self):
+ def _get_data(
+     self, *, known_hosts: list[str] | None = None
+ ) -> dict[str, list[str]]:
      return {
-         CONF_IGNORE_CEC: list(self._ignore_cec),
-         CONF_KNOWN_HOSTS: list(self._known_hosts),
-         CONF_UUID: list(self._wanted_uuid),
+         CONF_IGNORE_CEC: [],
+         CONF_KNOWN_HOSTS: known_hosts or [],
+         CONF_UUID: [],
      }
@@ -123,14 +109,9 @@ class CastOptionsFlowHandler(OptionsFlow):
  ) -> ConfigFlowResult:
      """Manage the Google Cast options."""
      errors: dict[str, str] = {}
-     current_config = self.config_entry.data
      if user_input is not None:
-         bad_hosts, known_hosts = _string_to_list(
-             user_input.get(CONF_KNOWN_HOSTS, ""), KNOWN_HOSTS_SCHEMA
-         )
-         if not bad_hosts:
-             self.updated_config = dict(current_config)
+         known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, []))
+         self.updated_config = dict(self.config_entry.data)
          self.updated_config[CONF_KNOWN_HOSTS] = known_hosts
          if self.show_advanced_options:
@@ -141,13 +122,11 @@ class CastOptionsFlowHandler(OptionsFlow):
          )
          return self.async_create_entry(title="", data={})
-     fields: dict[vol.Marker, type[str]] = {}
-     suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS))
-     _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value)
      return self.async_show_form(
          step_id="basic_options",
-         data_schema=vol.Schema(fields),
+         data_schema=self.add_suggested_values_to_schema(
+             KNOWN_HOSTS_SCHEMA, self.config_entry.data
+         ),
          errors=errors,
          last_step=not self.show_advanced_options,
      )
@@ -206,6 +185,10 @@ def _string_to_list(string, schema):
  return invalid, items
+ def _trim_items(items: list[str]) -> list[str]:
+     return [x.strip() for x in items if x.strip()]
  def _add_with_suggestion(
      fields: dict[vol.Marker, type[str]], key: str, suggested_value: str
  ) -> None:


@@ -2,7 +2,7 @@
  from __future__ import annotations
- from typing import TYPE_CHECKING, TypedDict
+ from typing import TYPE_CHECKING, NotRequired, TypedDict
  from homeassistant.util.signal_type import SignalType
@@ -46,3 +46,4 @@ class HomeAssistantControllerData(TypedDict):
  hass_uuid: str
  client_id: str | None
  refresh_token: str
+ app_id: NotRequired[str]


@@ -7,6 +7,7 @@ from dataclasses import dataclass
  import logging
  from typing import TYPE_CHECKING, ClassVar
  from urllib.parse import urlparse
+ from uuid import UUID
  import aiohttp
  import attr
@@ -40,7 +41,7 @@ class ChromecastInfo:
  is_dynamic_group = attr.ib(type=bool | None, default=None)
  @property
- def friendly_name(self) -> str:
+ def friendly_name(self) -> str | None:
      """Return the Friendly Name."""
      return self.cast_info.friendly_name
@@ -50,7 +51,7 @@ class ChromecastInfo:
  return self.cast_info.cast_type == CAST_TYPE_GROUP
  @property
- def uuid(self) -> bool:
+ def uuid(self) -> UUID:
      """Return the UUID."""
      return self.cast_info.uuid
@@ -111,7 +112,10 @@ class ChromecastInfo:
  is_dynamic_group = False
  http_group_status = None
  http_group_status = dial.get_multizone_status(
-     None,
+     # We pass services which will be used for the HTTP request, and we
+     # don't care about the host in http_group_status.dynamic_groups so
+     # we pass an empty string to simplify the code.
+     "",
      services=self.cast_info.services,
      zconf=ChromeCastZeroconf.get_zeroconf(),
  )


@@ -14,7 +14,7 @@
  "documentation": "https://www.home-assistant.io/integrations/cast",
  "iot_class": "local_polling",
  "loggers": ["casttube", "pychromecast"],
- "requirements": ["PyChromecast==14.0.5"],
+ "requirements": ["PyChromecast==14.0.6"],
  "single_config_entry": true,
  "zeroconf": ["_googlecast._tcp.local."]
  }


@@ -7,11 +7,11 @@ show_lovelace_view:
  integration: cast
  domain: media_player
  dashboard_path:
- required: true
  example: lovelace-cast
  selector:
  text:
  view_path:
+ required: true
  example: downstairs
  selector:
  text:


@@ -6,9 +6,11 @@
  },
  "config": {
  "title": "Google Cast configuration",
- "description": "Known Hosts - A comma-separated list of hostnames or IP-addresses of cast devices, use if mDNS discovery is not working.",
  "data": {
- "known_hosts": "Known hosts"
+ "known_hosts": "Add known host"
+ },
+ "data_description": {
+   "known_hosts": "Hostnames or IP-addresses of cast devices, use if mDNS discovery is not working"
  }
  }
  },
@@ -20,9 +22,11 @@
  "step": {
  "basic_options": {
  "title": "[%key:component::cast::config::step::config::title%]",
- "description": "[%key:component::cast::config::step::config::description%]",
  "data": {
  "known_hosts": "[%key:component::cast::config::step::config::data::known_hosts%]"
+ },
+ "data_description": {
+   "known_hosts": "[%key:component::cast::config::step::config::data_description::known_hosts%]"
  }
  },
  "advanced_options": {
@@ -49,7 +53,7 @@
  },
  "dashboard_path": {
  "name": "Dashboard path",
- "description": "The URL path of the dashboard to show."
+ "description": "The URL path of the dashboard to show, defaults to lovelace if not specified."
  },
  "view_path": {
  "name": "View path",


@@ -51,8 +51,7 @@ def async_get_chat_log(
  )
  if user_input is not None and (
      (content := chat_log.content[-1]).role != "user"
-     # MyPy doesn't understand that content is a UserContent here
-     or content.content != user_input.text  # type: ignore[union-attr]
+     or content.content != user_input.text
  ):
      chat_log.async_add_user_content(UserContent(content=user_input.text))
@@ -128,7 +127,7 @@ class ConverseError(HomeAssistantError):
  class SystemContent:
      """Base class for chat messages."""
-     role: str = field(init=False, default="system")
+     role: Literal["system"] = field(init=False, default="system")
      content: str
@@ -136,7 +135,7 @@ class SystemContent:
  class UserContent:
      """Assistant content."""
-     role: str = field(init=False, default="user")
+     role: Literal["user"] = field(init=False, default="user")
      content: str
@@ -144,7 +143,7 @@ class UserContent:
  class AssistantContent:
      """Assistant content."""
-     role: str = field(init=False, default="assistant")
+     role: Literal["assistant"] = field(init=False, default="assistant")
      agent_id: str
      content: str | None = None
      tool_calls: list[llm.ToolInput] | None = None
@@ -154,7 +153,7 @@ class AssistantContent:
  class ToolResultContent:
      """Tool result content."""
-     role: str = field(init=False, default="tool_result")
+     role: Literal["tool_result"] = field(init=False, default="tool_result")
      agent_id: str
      tool_call_id: str
      tool_name: str
@@ -193,8 +192,8 @@ class ChatLog:
  return (
      last_msg.role == "assistant"
-     and last_msg.content is not None  # type: ignore[union-attr]
-     and last_msg.content.strip().endswith(  # type: ignore[union-attr]
+     and last_msg.content is not None
+     and last_msg.content.strip().endswith(
      (
          "?",
          ";",  # Greek question mark


@@ -101,9 +101,11 @@ def hostname_from_url(url: str) -> str:
  def _host_validator(config: dict[str, str]) -> dict[str, str]:
      """Validate that a host is properly configured."""
-     if config[CONF_HOST].startswith("elks://"):
+     if config[CONF_HOST].startswith(("elks://", "elksv1_2://")):
          if CONF_USERNAME not in config or CONF_PASSWORD not in config:
-             raise vol.Invalid("Specify username and password for elks://")
+             raise vol.Invalid(
+                 "Specify username and password for elks:// or elksv1_2://"
+             )
      elif not config[CONF_HOST].startswith("elk://") and not config[
          CONF_HOST
      ].startswith("serial://"):


@@ -1,5 +1,5 @@
  {
- "title": "Energenie Power Sockets Integration.",
+ "title": "Energenie Power Sockets",
  "config": {
  "step": {
  "user": {


@@ -16,7 +16,7 @@
  "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
  "mqtt": ["esphome/discover/#"],
  "requirements": [
- "aioesphomeapi==29.6.0",
+ "aioesphomeapi==29.7.0",
  "esphome-dashboard-api==1.2.3",
  "bleak-esphome==2.12.0"
  ],


@@ -54,7 +54,7 @@
  "init": {
  "data": {
  "timeout": "Request timeout (seconds)",
- "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras"
+ "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras"
  }
  }
  }


@@ -2,7 +2,7 @@
  "services": {
  "restart": {
  "name": "[%key:common::action::restart%]",
- "description": "Sends a restart command to a ffmpeg based sensor.",
+ "description": "Sends a restart command to an FFmpeg-based sensor.",
  "fields": {
  "entity_id": {
  "name": "Entity",
@@ -12,7 +12,7 @@
  },
  "start": {
  "name": "[%key:common::action::start%]",
- "description": "Sends a start command to a ffmpeg based sensor.",
+ "description": "Sends a start command to an FFmpeg-based sensor.",
  "fields": {
  "entity_id": {
  "name": "Entity",
@@ -22,7 +22,7 @@
  },
  "stop": {
  "name": "[%key:common::action::stop%]",
- "description": "Sends a stop command to a ffmpeg based sensor.",
+ "description": "Sends a stop command to an FFmpeg-based sensor.",
  "fields": {
  "entity_id": {
  "name": "Entity",


@@ -9,5 +9,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["fyta_cli"],
  "quality_scale": "platinum",
- "requirements": ["fyta_cli==0.7.1"]
+ "requirements": ["fyta_cli==0.7.2"]
  }


@@ -44,7 +44,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: GiosConfigEntry) -> bool
  try:
      gios = await Gios.create(websession, station_id)
  except (GiosError, ConnectionError, ClientConnectorError) as err:
-     raise ConfigEntryNotReady from err
+     raise ConfigEntryNotReady(
+         translation_domain=DOMAIN,
+         translation_key="cannot_connect",
+         translation_placeholders={
+             "entry": entry.title,
+             "error": repr(err),
+         },
+     ) from err
  coordinator = GiosDataUpdateCoordinator(hass, entry, gios)
  await coordinator.async_config_entry_first_refresh()


@@ -57,4 +57,11 @@ class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]):
      async with asyncio.timeout(API_TIMEOUT):
          return await self.gios.async_update()
  except (GiosError, ClientConnectorError) as error:
-     raise UpdateFailed(error) from error
+     raise UpdateFailed(
+         translation_domain=DOMAIN,
+         translation_key="update_error",
+         translation_placeholders={
+             "entry": self.config_entry.title,
+             "error": repr(error),
+         },
+     ) from error


@@ -170,5 +170,13 @@
  }
  }
  }
+ },
+ "exceptions": {
+   "cannot_connect": {
+     "message": "An error occurred while connecting to the GIOS API for {entry}: {error}"
+   },
+   "update_error": {
+     "message": "An error occurred while retrieving data from the GIOS API for {entry}: {error}"
+   }
  }
  }


@@ -7,5 +7,5 @@
  "documentation": "https://www.home-assistant.io/integrations/google",
  "iot_class": "cloud_polling",
  "loggers": ["googleapiclient"],
- "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==8.3.0"]
+ "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.1"]
  }


@@ -2,6 +2,7 @@
  from __future__ import annotations
+ import mimetypes
  from pathlib import Path
  from google import genai  # type: ignore[attr-defined]
@@ -83,7 +84,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
  )
  if not Path(filename).exists():
      raise HomeAssistantError(f"`{filename}` does not exist")
- prompt_parts.append(client.files.upload(file=filename))
+ mimetype = mimetypes.guess_type(filename)[0]
+ with open(filename, "rb") as file:
+     uploaded_file = client.files.upload(
+         file=file, config={"mime_type": mimetype}
+     )
+ prompt_parts.append(uploaded_file)
  await hass.async_add_executor_job(append_files_to_prompt)


@@ -188,7 +188,7 @@ def _convert_content(
  | conversation.SystemContent,
  ) -> Content:
      """Convert HA content to Google content."""
-     if content.role != "assistant" or not content.tool_calls:  # type: ignore[union-attr]
+     if content.role != "assistant" or not content.tool_calls:
          role = "model" if content.role == "assistant" else content.role
          return Content(
              role=role,
@@ -321,24 +321,14 @@ class GoogleGenerativeAIConversationEntity(
  for chat_content in chat_log.content[1:-1]:
      if chat_content.role == "tool_result":
-         # mypy doesn't like picking a type based on checking shared property 'role'
-         tool_results.append(cast(conversation.ToolResultContent, chat_content))
+         tool_results.append(chat_content)
          continue
      if tool_results:
          messages.append(_create_google_tool_response_content(tool_results))
          tool_results.clear()
-     messages.append(
-         _convert_content(
-             cast(
-                 conversation.UserContent
-                 | conversation.SystemContent
-                 | conversation.AssistantContent,
-                 chat_content,
-             )
-         )
-     )
+     messages.append(_convert_content(chat_content))
  if tool_results:
      messages.append(_create_google_tool_response_content(tool_results))

View File

@ -2,7 +2,7 @@
"domain": "google_generative_ai_conversation", "domain": "google_generative_ai_conversation",
"name": "Google Generative AI", "name": "Google Generative AI",
"after_dependencies": ["assist_pipeline", "intent"], "after_dependencies": ["assist_pipeline", "intent"],
"codeowners": ["@tronikos"], "codeowners": ["@tronikos", "@ivanlh"],
"config_flow": true, "config_flow": true,
"dependencies": ["conversation"], "dependencies": ["conversation"],
"documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",

View File

@ -38,28 +38,28 @@
"name": "Input 1 voltage" "name": "Input 1 voltage"
}, },
"inverter_amperage_input_1": { "inverter_amperage_input_1": {
"name": "Input 1 Amperage" "name": "Input 1 amperage"
}, },
"inverter_wattage_input_1": { "inverter_wattage_input_1": {
"name": "Input 1 Wattage" "name": "Input 1 wattage"
}, },
"inverter_voltage_input_2": { "inverter_voltage_input_2": {
"name": "Input 2 voltage" "name": "Input 2 voltage"
}, },
"inverter_amperage_input_2": { "inverter_amperage_input_2": {
"name": "Input 2 Amperage" "name": "Input 2 amperage"
}, },
"inverter_wattage_input_2": { "inverter_wattage_input_2": {
"name": "Input 2 Wattage" "name": "Input 2 wattage"
}, },
"inverter_voltage_input_3": { "inverter_voltage_input_3": {
"name": "Input 3 voltage" "name": "Input 3 voltage"
}, },
"inverter_amperage_input_3": { "inverter_amperage_input_3": {
"name": "Input 3 Amperage" "name": "Input 3 amperage"
}, },
"inverter_wattage_input_3": { "inverter_wattage_input_3": {
"name": "Input 3 Wattage" "name": "Input 3 wattage"
}, },
"inverter_internal_wattage": { "inverter_internal_wattage": {
"name": "Internal wattage" "name": "Internal wattage"
@ -137,13 +137,13 @@
"name": "Load consumption" "name": "Load consumption"
}, },
"mix_wattage_pv_1": { "mix_wattage_pv_1": {
"name": "PV1 Wattage" "name": "PV1 wattage"
}, },
"mix_wattage_pv_2": { "mix_wattage_pv_2": {
"name": "PV2 Wattage" "name": "PV2 wattage"
}, },
"mix_wattage_pv_all": { "mix_wattage_pv_all": {
"name": "All PV Wattage" "name": "All PV wattage"
}, },
"mix_export_to_grid": { "mix_export_to_grid": {
"name": "Export to grid" "name": "Export to grid"
@ -182,7 +182,7 @@
"name": "Storage production today" "name": "Storage production today"
}, },
"storage_storage_production_lifetime": { "storage_storage_production_lifetime": {
"name": "Lifetime Storage production" "name": "Lifetime storage production"
}, },
"storage_grid_discharge_today": { "storage_grid_discharge_today": {
"name": "Grid discharged today" "name": "Grid discharged today"
@ -224,7 +224,7 @@
"name": "Storage charging/ discharging(-ve)" "name": "Storage charging/ discharging(-ve)"
}, },
"storage_load_consumption_solar_storage": { "storage_load_consumption_solar_storage": {
"name": "Load consumption (Solar + Storage)" "name": "Load consumption (solar + storage)"
}, },
"storage_charge_today": { "storage_charge_today": {
"name": "Charge today" "name": "Charge today"
@ -257,7 +257,7 @@
"name": "Output voltage" "name": "Output voltage"
}, },
"storage_ac_output_frequency": { "storage_ac_output_frequency": {
"name": "Ac output frequency" "name": "AC output frequency"
}, },
"storage_current_pv": { "storage_current_pv": {
"name": "Solar charge current" "name": "Solar charge current"
@ -290,7 +290,7 @@
"name": "Lifetime total energy input 1" "name": "Lifetime total energy input 1"
}, },
"tlx_energy_today_input_1": { "tlx_energy_today_input_1": {
"name": "Energy Today Input 1" "name": "Energy today input 1"
}, },
"tlx_voltage_input_1": { "tlx_voltage_input_1": {
"name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_1::name%]" "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_1::name%]"
@ -305,7 +305,7 @@
"name": "Lifetime total energy input 2" "name": "Lifetime total energy input 2"
}, },
"tlx_energy_today_input_2": { "tlx_energy_today_input_2": {
"name": "Energy Today Input 2" "name": "Energy today input 2"
}, },
"tlx_voltage_input_2": { "tlx_voltage_input_2": {
"name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_2::name%]" "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_2::name%]"
@ -320,7 +320,7 @@
"name": "Lifetime total energy input 3" "name": "Lifetime total energy input 3"
}, },
"tlx_energy_today_input_3": { "tlx_energy_today_input_3": {
"name": "Energy Today Input 3" "name": "Energy today input 3"
}, },
"tlx_voltage_input_3": { "tlx_voltage_input_3": {
"name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_3::name%]" "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_3::name%]"
@ -335,16 +335,16 @@
"name": "Lifetime total energy input 4" "name": "Lifetime total energy input 4"
}, },
"tlx_energy_today_input_4": { "tlx_energy_today_input_4": {
"name": "Energy Today Input 4" "name": "Energy today input 4"
}, },
"tlx_voltage_input_4": { "tlx_voltage_input_4": {
"name": "Input 4 voltage" "name": "Input 4 voltage"
}, },
"tlx_amperage_input_4": { "tlx_amperage_input_4": {
"name": "Input 4 Amperage" "name": "Input 4 amperage"
}, },
"tlx_wattage_input_4": { "tlx_wattage_input_4": {
"name": "Input 4 Wattage" "name": "Input 4 wattage"
}, },
"tlx_solar_generation_total": { "tlx_solar_generation_total": {
"name": "Lifetime total solar energy" "name": "Lifetime total solar energy"
@ -434,10 +434,10 @@
"name": "Money lifetime" "name": "Money lifetime"
}, },
"total_energy_today": { "total_energy_today": {
"name": "Energy Today" "name": "Energy today"
}, },
"total_output_power": { "total_output_power": {
"name": "Output Power" "name": "Output power"
}, },
"total_energy_output": { "total_energy_output": {
"name": "[%key:component::growatt_server::entity::sensor::inverter_energy_total::name%]" "name": "[%key:component::growatt_server::entity::sensor::inverter_energy_total::name%]"

View File

@ -768,7 +768,7 @@
"description": "[%key:component::habitica::common::notes_description%]" "description": "[%key:component::habitica::common::notes_description%]"
}, },
"tag": { "tag": {
"name": "[%key:component::habitica::common::tag_name%]", "name": "[%key:component::habitica::common::tag_options_name%]",
"description": "[%key:component::habitica::common::tag_description%]" "description": "[%key:component::habitica::common::tag_description%]"
}, },
"alias": { "alias": {
@ -868,7 +868,7 @@
"description": "[%key:component::habitica::common::notes_description%]" "description": "[%key:component::habitica::common::notes_description%]"
}, },
"tag": { "tag": {
"name": "[%key:component::habitica::common::tag_name%]", "name": "[%key:component::habitica::common::tag_options_name%]",
"description": "[%key:component::habitica::common::tag_description%]" "description": "[%key:component::habitica::common::tag_description%]"
}, },
"alias": { "alias": {
@ -1008,7 +1008,7 @@
"description": "[%key:component::habitica::common::notes_description%]" "description": "[%key:component::habitica::common::notes_description%]"
}, },
"tag": { "tag": {
"name": "[%key:component::habitica::common::tag_name%]", "name": "[%key:component::habitica::common::tag_options_name%]",
"description": "[%key:component::habitica::common::tag_description%]" "description": "[%key:component::habitica::common::tag_description%]"
}, },
"alias": { "alias": {
@ -1024,11 +1024,11 @@
"description": "[%key:component::habitica::common::date_description%]" "description": "[%key:component::habitica::common::date_description%]"
}, },
"reminder": { "reminder": {
"name": "[%key:component::habitica::common::reminder_name%]", "name": "[%key:component::habitica::common::reminder_options_name%]",
"description": "[%key:component::habitica::common::reminder_description%]" "description": "[%key:component::habitica::common::reminder_description%]"
}, },
"add_checklist_item": { "add_checklist_item": {
"name": "[%key:component::habitica::common::add_checklist_item_name%]", "name": "[%key:component::habitica::common::checklist_options_name%]",
"description": "[%key:component::habitica::common::add_checklist_item_description%]" "description": "[%key:component::habitica::common::add_checklist_item_description%]"
} }
}, },

View File

@ -3,27 +3,35 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine, Sequence from collections.abc import Awaitable, Callable, Coroutine, Sequence
from contextlib import suppress
from datetime import datetime from datetime import datetime
from functools import reduce, wraps from functools import reduce, wraps
import logging
from operator import ior from operator import ior
from typing import Any from typing import Any, Final
from pyheos import ( from pyheos import (
AddCriteriaType, AddCriteriaType,
ControlType, ControlType,
HeosError, HeosError,
HeosPlayer, HeosPlayer,
MediaItem,
MediaMusicSource,
MediaType as HeosMediaType,
PlayState, PlayState,
RepeatType, RepeatType,
const as heos_const, const as heos_const,
) )
from pyheos.util import mediauri as heos_source
import voluptuous as vol import voluptuous as vol
from homeassistant.components import media_source from homeassistant.components import media_source
from homeassistant.components.media_player import ( from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_LEVEL,
BrowseError,
BrowseMedia, BrowseMedia,
MediaClass,
MediaPlayerEnqueue, MediaPlayerEnqueue,
MediaPlayerEntity, MediaPlayerEntity,
MediaPlayerEntityFeature, MediaPlayerEntityFeature,
@ -32,6 +40,7 @@ from homeassistant.components.media_player import (
RepeatMode, RepeatMode,
async_process_play_media_url, async_process_play_media_url,
) )
from homeassistant.components.media_source import BrowseMediaSource
from homeassistant.const import Platform from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
@ -55,6 +64,8 @@ from .coordinator import HeosConfigEntry, HeosCoordinator
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
BROWSE_ROOT: Final = "heos://media"
BASE_SUPPORTED_FEATURES = ( BASE_SUPPORTED_FEATURES = (
MediaPlayerEntityFeature.VOLUME_MUTE MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_SET
@ -97,6 +108,21 @@ HEOS_HA_REPEAT_TYPE_MAP = {
} }
HA_HEOS_REPEAT_TYPE_MAP = {v: k for k, v in HEOS_HA_REPEAT_TYPE_MAP.items()} HA_HEOS_REPEAT_TYPE_MAP = {v: k for k, v in HEOS_HA_REPEAT_TYPE_MAP.items()}
HEOS_MEDIA_TYPE_TO_MEDIA_CLASS = {
HeosMediaType.ALBUM: MediaClass.ALBUM,
HeosMediaType.ARTIST: MediaClass.ARTIST,
HeosMediaType.CONTAINER: MediaClass.DIRECTORY,
HeosMediaType.GENRE: MediaClass.GENRE,
HeosMediaType.HEOS_SERVER: MediaClass.DIRECTORY,
HeosMediaType.HEOS_SERVICE: MediaClass.DIRECTORY,
HeosMediaType.MUSIC_SERVICE: MediaClass.DIRECTORY,
HeosMediaType.PLAYLIST: MediaClass.PLAYLIST,
HeosMediaType.SONG: MediaClass.TRACK,
HeosMediaType.STATION: MediaClass.TRACK,
}
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry( async def async_setup_entry(
hass: HomeAssistant, hass: HomeAssistant,
@ -282,6 +308,16 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity):
self, media_type: MediaType | str, media_id: str, **kwargs: Any self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None: ) -> None:
"""Play a piece of media.""" """Play a piece of media."""
if heos_source.is_media_uri(media_id):
media, data = heos_source.from_media_uri(media_id)
if not isinstance(media, MediaItem):
raise ValueError(f"Invalid media id '{media_id}'")
await self._player.play_media(
media,
HA_HEOS_ENQUEUE_MAP[kwargs.get(ATTR_MEDIA_ENQUEUE)],
)
return
if media_source.is_media_source_id(media_id): if media_source.is_media_source_id(media_id):
media_type = MediaType.URL media_type = MediaType.URL
play_item = await media_source.async_resolve_media( play_item = await media_source.async_resolve_media(
@ -534,14 +570,101 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity):
"""Volume level of the media player (0..1).""" """Volume level of the media player (0..1)."""
return self._player.volume / 100 return self._player.volume / 100
async def _async_browse_media_root(self) -> BrowseMedia:
"""Return media browsing root."""
if not self.coordinator.heos.music_sources:
try:
await self.coordinator.heos.get_music_sources()
except HeosError as error:
_LOGGER.debug("Unable to load music sources: %s", error)
children: list[BrowseMedia] = [
_media_to_browse_media(source)
for source in self.coordinator.heos.music_sources.values()
if source.available
]
root = BrowseMedia(
title="Music Sources",
media_class=MediaClass.DIRECTORY,
children_media_class=MediaClass.DIRECTORY,
media_content_type="",
media_content_id=BROWSE_ROOT,
can_expand=True,
can_play=False,
children=children,
)
# Append media source items
with suppress(BrowseError):
browse = await self._async_browse_media_source()
# If domain is None, it's an overview of available sources
if browse.domain is None and browse.children:
children.extend(browse.children)
else:
children.append(browse)
return root
async def _async_browse_heos_media(self, media_content_id: str) -> BrowseMedia:
"""Browse a HEOS media item."""
media, data = heos_source.from_media_uri(media_content_id)
browse_media = _media_to_browse_media(media)
try:
browse_result = await self.coordinator.heos.browse_media(media)
except HeosError as error:
_LOGGER.debug("Unable to browse media %s: %s", media, error)
else:
browse_media.children = [
_media_to_browse_media(item)
for item in browse_result.items
if item.browsable or item.playable
]
return browse_media
async def _async_browse_media_source(
self, media_content_id: str | None = None
) -> BrowseMediaSource:
"""Browse a media source item."""
return await media_source.async_browse_media(
self.hass,
media_content_id,
content_filter=lambda item: item.media_content_type.startswith("audio/"),
)
async def async_browse_media( async def async_browse_media(
self, self,
media_content_type: MediaType | str | None = None, media_content_type: MediaType | str | None = None,
media_content_id: str | None = None, media_content_id: str | None = None,
) -> BrowseMedia: ) -> BrowseMedia:
"""Implement the websocket media browsing helper.""" """Implement the websocket media browsing helper."""
return await media_source.async_browse_media( if media_content_id in (None, BROWSE_ROOT):
self.hass, return await self._async_browse_media_root()
media_content_id, assert media_content_id is not None
content_filter=lambda item: item.media_content_type.startswith("audio/"), if heos_source.is_media_uri(media_content_id):
return await self._async_browse_heos_media(media_content_id)
if media_source.is_media_source_id(media_content_id):
return await self._async_browse_media_source(media_content_id)
raise ServiceValidationError(
translation_domain=HEOS_DOMAIN,
translation_key="unsupported_media_content_id",
translation_placeholders={"media_content_id": media_content_id},
)
def _media_to_browse_media(media: MediaItem | MediaMusicSource) -> BrowseMedia:
"""Convert a HEOS media item to a browse media item."""
can_expand = False
can_play = False
if isinstance(media, MediaMusicSource):
can_expand = media.available
else:
can_expand = media.browsable
can_play = media.playable
return BrowseMedia(
can_expand=can_expand,
can_play=can_play,
media_content_id=heos_source.to_media_uri(media),
media_content_type="",
media_class=HEOS_MEDIA_TYPE_TO_MEDIA_CLASS[media.type],
title=media.name,
thumbnail=media.image_url,
) )
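The browsing support added above dispatches on the shape of media_content_id: the HEOS root, an individual heos:// item, or a media-source item, with anything else raising the new unsupported_media_content_id error. A simplified standalone sketch of that dispatch; the handler names echo the methods above, the non-root heos URI is a placeholder, and the prefix checks merely stand in for heos_source.is_media_uri and media_source.is_media_source_id:

```python
def route_browse(media_content_id: str | None) -> str:
    """Pick a browse handler the way async_browse_media above does."""
    if media_content_id in (None, "heos://media"):  # BROWSE_ROOT
        return "_async_browse_media_root"
    if media_content_id.startswith("heos://"):  # assumed prefix check
        return "_async_browse_heos_media"
    if media_content_id.startswith("media-source://"):  # assumed prefix check
        return "_async_browse_media_source"
    raise ValueError(f"Unsupported media_content_id: {media_content_id}")


for content_id in (None, "heos://media", "heos://placeholder", "media-source://radio/station"):
    print(content_id, "->", route_browse(content_id))
```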

View File

@ -146,6 +146,9 @@
}, },
"unknown_source": { "unknown_source": {
"message": "Unknown source: {source}" "message": "Unknown source: {source}"
},
"unsupported_media_content_id": {
"message": "Unsupported media_content_id: {media_content_id}"
} }
}, },
"issues": { "issues": {

View File

@ -629,14 +629,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeConnectConfigEntry)
home_connect_client = HomeConnectClient(config_entry_auth) home_connect_client = HomeConnectClient(config_entry_auth)
coordinator = HomeConnectCoordinator(hass, entry, home_connect_client) coordinator = HomeConnectCoordinator(hass, entry, home_connect_client)
await coordinator.async_config_entry_first_refresh() await coordinator.async_setup()
entry.runtime_data = coordinator entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
entry.runtime_data.start_event_listener() entry.runtime_data.start_event_listener()
entry.async_create_background_task(
hass,
coordinator.async_refresh(),
f"home_connect-initial-full-refresh-{entry.entry_id}",
)
return True return True
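The setup change above stops blocking config entry setup on a full data refresh: the coordinator now does a lightweight async_setup, platforms are forwarded, and the complete refresh runs as a background task. A rough standalone sketch of that ordering; the function names are placeholders, not the Home Connect integration's API:

```python
import asyncio


async def register_devices() -> None:
    print("appliances registered (fast)")


async def full_refresh() -> None:
    await asyncio.sleep(0.2)  # stands in for many rate-limited API calls
    print("full appliance data fetched (slow, in the background)")


async def setup_entry() -> asyncio.Task[None]:
    await register_devices()  # quick setup, unlikely to trip rate limits
    return asyncio.create_task(full_refresh())  # deferred; setup finishes immediately


async def main() -> None:
    refresh_task = await setup_entry()
    print("setup finished before the full refresh")
    await refresh_task  # awaited here only so the demo prints both lines


asyncio.run(main())
```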

View File

@ -102,7 +102,7 @@ class HomeConnectButtonEntity(HomeConnectEntity, ButtonEntity):
) )
self.entity_description = desc self.entity_description = desc
self.appliance = appliance self.appliance = appliance
self.unique_id = f"{appliance.info.ha_id}-{desc.key}" self._attr_unique_id = f"{appliance.info.ha_id}-{desc.key}"
def update_native_value(self) -> None: def update_native_value(self) -> None:
"""Set the value of the entity.""" """Set the value of the entity."""

View File

@ -137,41 +137,6 @@ def setup_home_connect_entry(
defaultdict(list) defaultdict(list)
) )
entities: list[HomeConnectEntity] = []
for appliance in entry.runtime_data.data.values():
entities_to_add = get_entities_for_appliance(entry, appliance)
if get_option_entities_for_appliance:
entities_to_add.extend(get_option_entities_for_appliance(entry, appliance))
for event_key in (
EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
):
changed_options_listener_remove_callback = (
entry.runtime_data.async_add_listener(
partial(
_create_option_entities,
entry,
appliance,
known_entity_unique_ids,
get_option_entities_for_appliance,
async_add_entities,
),
(appliance.info.ha_id, event_key),
)
)
entry.async_on_unload(changed_options_listener_remove_callback)
changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
changed_options_listener_remove_callback
)
known_entity_unique_ids.update(
{
cast(str, entity.unique_id): appliance.info.ha_id
for entity in entities_to_add
}
)
entities.extend(entities_to_add)
async_add_entities(entities)
entry.async_on_unload( entry.async_on_unload(
entry.runtime_data.async_add_special_listener( entry.runtime_data.async_add_special_listener(
partial( partial(

View File

@ -10,6 +10,7 @@ from .utils import bsh_key_to_translation_key
DOMAIN = "home_connect" DOMAIN = "home_connect"
API_DEFAULT_RETRY_AFTER = 60
APPLIANCES_WITH_PROGRAMS = ( APPLIANCES_WITH_PROGRAMS = (
"CleaningRobot", "CleaningRobot",

View File

@ -2,7 +2,7 @@
from __future__ import annotations from __future__ import annotations
import asyncio from asyncio import sleep as asyncio_sleep
from collections import defaultdict from collections import defaultdict
from collections.abc import Callable from collections.abc import Callable
from dataclasses import dataclass from dataclasses import dataclass
@ -29,6 +29,7 @@ from aiohomeconnect.model.error import (
HomeConnectApiError, HomeConnectApiError,
HomeConnectError, HomeConnectError,
HomeConnectRequestError, HomeConnectRequestError,
TooManyRequestsError,
UnauthorizedError, UnauthorizedError,
) )
from aiohomeconnect.model.program import EnumerateProgram, ProgramDefinitionOption from aiohomeconnect.model.program import EnumerateProgram, ProgramDefinitionOption
@ -36,11 +37,11 @@ from propcache.api import cached_property
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import APPLIANCES_WITH_PROGRAMS, DOMAIN from .const import API_DEFAULT_RETRY_AFTER, APPLIANCES_WITH_PROGRAMS, DOMAIN
from .utils import get_dict_from_home_connect_error from .utils import get_dict_from_home_connect_error
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -154,7 +155,7 @@ class HomeConnectCoordinator(
f"home_connect-events_listener_task-{self.config_entry.entry_id}", f"home_connect-events_listener_task-{self.config_entry.entry_id}",
) )
async def _event_listener(self) -> None: async def _event_listener(self) -> None: # noqa: C901
"""Match event with listener for event type.""" """Match event with listener for event type."""
retry_time = 10 retry_time = 10
while True: while True:
@ -231,15 +232,15 @@ class HomeConnectCoordinator(
self.data[event_message_ha_id].update(appliance_data) self.data[event_message_ha_id].update(appliance_data)
else: else:
self.data[event_message_ha_id] = appliance_data self.data[event_message_ha_id] = appliance_data
for listener, context in list( for listener, context in self._special_listeners.values():
self._special_listeners.values()
) + list(self._listeners.values()):
assert isinstance(context, tuple)
if ( if (
EventKey.BSH_COMMON_APPLIANCE_DEPAIRED EventKey.BSH_COMMON_APPLIANCE_DEPAIRED
not in context not in context
): ):
listener() listener()
self._call_all_event_listeners_for_appliance(
event_message_ha_id
)
case EventType.DISCONNECTED: case EventType.DISCONNECTED:
self.data[event_message_ha_id].info.connected = False self.data[event_message_ha_id].info.connected = False
@ -269,7 +270,7 @@ class HomeConnectCoordinator(
error, error,
retry_time, retry_time,
) )
await asyncio.sleep(retry_time) await asyncio_sleep(retry_time)
retry_time = min(retry_time * 2, 3600) retry_time = min(retry_time * 2, 3600)
except HomeConnectApiError as error: except HomeConnectApiError as error:
_LOGGER.error("Error while listening for events: %s", error) _LOGGER.error("Error while listening for events: %s", error)
@ -278,6 +279,13 @@ class HomeConnectCoordinator(
) )
break break
# Trigger the depaired-appliance listeners so entities of removed
# appliances are dropped from known_entities in common.py
for listener, context in self._special_listeners.values():
assert isinstance(context, tuple)
if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context:
listener()
@callback @callback
def _call_event_listener(self, event_message: EventMessage) -> None: def _call_event_listener(self, event_message: EventMessage) -> None:
"""Call listener for event.""" """Call listener for event."""
@ -295,6 +303,42 @@ class HomeConnectCoordinator(
async def _async_update_data(self) -> dict[str, HomeConnectApplianceData]: async def _async_update_data(self) -> dict[str, HomeConnectApplianceData]:
"""Fetch data from Home Connect.""" """Fetch data from Home Connect."""
await self._async_setup()
for appliance_data in self.data.values():
appliance = appliance_data.info
ha_id = appliance.ha_id
while True:
try:
self.data[ha_id] = await self._get_appliance_data(
appliance, self.data.get(ha_id)
)
except TooManyRequestsError as err:
_LOGGER.debug(
"Rate limit exceeded on initial fetch: %s",
err,
)
await asyncio_sleep(err.retry_after or API_DEFAULT_RETRY_AFTER)
else:
break
for listener, context in self._special_listeners.values():
assert isinstance(context, tuple)
if EventKey.BSH_COMMON_APPLIANCE_PAIRED in context:
listener()
return self.data
async def async_setup(self) -> None:
"""Set up the devices."""
try:
await self._async_setup()
except UpdateFailed as err:
raise ConfigEntryNotReady from err
async def _async_setup(self) -> None:
"""Set up the devices."""
old_appliances = set(self.data.keys())
try: try:
appliances = await self.client.get_home_appliances() appliances = await self.client.get_home_appliances()
except UnauthorizedError as error: except UnauthorizedError as error:
@ -312,12 +356,38 @@ class HomeConnectCoordinator(
translation_placeholders=get_dict_from_home_connect_error(error), translation_placeholders=get_dict_from_home_connect_error(error),
) from error ) from error
return { for appliance in appliances.homeappliances:
appliance.ha_id: await self._get_appliance_data( self.device_registry.async_get_or_create(
appliance, self.data.get(appliance.ha_id) config_entry_id=self.config_entry.entry_id,
identifiers={(DOMAIN, appliance.ha_id)},
manufacturer=appliance.brand,
name=appliance.name,
model=appliance.vib,
)
if appliance.ha_id not in self.data:
self.data[appliance.ha_id] = HomeConnectApplianceData(
commands=set(),
events={},
info=appliance,
options={},
programs=[],
settings={},
status={},
)
else:
self.data[appliance.ha_id].info.connected = appliance.connected
old_appliances.remove(appliance.ha_id)
for ha_id in old_appliances:
self.data.pop(ha_id, None)
device = self.device_registry.async_get_device(
identifiers={(DOMAIN, ha_id)}
)
if device:
self.device_registry.async_update_device(
device_id=device.id,
remove_config_entry_id=self.config_entry.entry_id,
) )
for appliance in appliances.homeappliances
}
async def _get_appliance_data( async def _get_appliance_data(
self, self,
@ -339,6 +409,8 @@ class HomeConnectCoordinator(
await self.client.get_settings(appliance.ha_id) await self.client.get_settings(appliance.ha_id)
).settings ).settings
} }
except TooManyRequestsError:
raise
except HomeConnectError as error: except HomeConnectError as error:
_LOGGER.debug( _LOGGER.debug(
"Error fetching settings for %s: %s", "Error fetching settings for %s: %s",
@ -351,6 +423,8 @@ class HomeConnectCoordinator(
status.key: status status.key: status
for status in (await self.client.get_status(appliance.ha_id)).status for status in (await self.client.get_status(appliance.ha_id)).status
} }
except TooManyRequestsError:
raise
except HomeConnectError as error: except HomeConnectError as error:
_LOGGER.debug( _LOGGER.debug(
"Error fetching status for %s: %s", "Error fetching status for %s: %s",
@ -365,6 +439,8 @@ class HomeConnectCoordinator(
if appliance.type in APPLIANCES_WITH_PROGRAMS: if appliance.type in APPLIANCES_WITH_PROGRAMS:
try: try:
all_programs = await self.client.get_all_programs(appliance.ha_id) all_programs = await self.client.get_all_programs(appliance.ha_id)
except TooManyRequestsError:
raise
except HomeConnectError as error: except HomeConnectError as error:
_LOGGER.debug( _LOGGER.debug(
"Error fetching programs for %s: %s", "Error fetching programs for %s: %s",
@ -421,6 +497,8 @@ class HomeConnectCoordinator(
await self.client.get_available_commands(appliance.ha_id) await self.client.get_available_commands(appliance.ha_id)
).commands ).commands
} }
except TooManyRequestsError:
raise
except HomeConnectError: except HomeConnectError:
commands = set() commands = set()
@ -455,6 +533,8 @@ class HomeConnectCoordinator(
).options ).options
or [] or []
} }
except TooManyRequestsError:
raise
except HomeConnectError as error: except HomeConnectError as error:
_LOGGER.debug( _LOGGER.debug(
"Error fetching options for %s: %s", "Error fetching options for %s: %s",

View File

@ -1,21 +1,28 @@
"""Home Connect entity base class.""" """Home Connect entity base class."""
from abc import abstractmethod from abc import abstractmethod
from collections.abc import Callable, Coroutine
import contextlib import contextlib
from datetime import datetime
import logging import logging
from typing import cast from typing import Any, Concatenate, cast
from aiohomeconnect.model import EventKey, OptionKey from aiohomeconnect.model import EventKey, OptionKey
from aiohomeconnect.model.error import ActiveProgramNotSetError, HomeConnectError from aiohomeconnect.model.error import (
ActiveProgramNotSetError,
HomeConnectError,
TooManyRequestsError,
)
from homeassistant.const import STATE_UNAVAILABLE from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import callback from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN from .const import API_DEFAULT_RETRY_AFTER, DOMAIN
from .coordinator import HomeConnectApplianceData, HomeConnectCoordinator from .coordinator import HomeConnectApplianceData, HomeConnectCoordinator
from .utils import get_dict_from_home_connect_error from .utils import get_dict_from_home_connect_error
@ -127,3 +134,34 @@ class HomeConnectOptionEntity(HomeConnectEntity):
def bsh_key(self) -> OptionKey: def bsh_key(self) -> OptionKey:
"""Return the BSH key.""" """Return the BSH key."""
return cast(OptionKey, self.entity_description.key) return cast(OptionKey, self.entity_description.key)
def constraint_fetcher[_EntityT: HomeConnectEntity, **_P](
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate the function to catch Home Connect too many requests error and retry later.
If it needs to be called later, it will call async_write_ha_state function
"""
async def handler_to_return(
self: _EntityT, *args: _P.args, **kwargs: _P.kwargs
) -> None:
async def handler(_datetime: datetime | None = None) -> None:
try:
await func(self, *args, **kwargs)
except TooManyRequestsError as err:
if (retry_after := err.retry_after) is None:
retry_after = API_DEFAULT_RETRY_AFTER
async_call_later(self.hass, retry_after, handler)
except HomeConnectError as err:
_LOGGER.error(
"Error fetching constraints for %s: %s", self.entity_id, err
)
else:
if _datetime is not None:
self.async_write_ha_state()
await handler()
return handler_to_return
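constraint_fetcher above keeps a rate-limited constraint fetch from failing entity setup: on TooManyRequestsError the call is rescheduled with async_call_later after retry_after (or API_DEFAULT_RETRY_AFTER), other Home Connect errors are only logged, and a deferred call that eventually succeeds writes the entity state. A loose standalone sketch of the reschedule-and-retry part using plain asyncio; the names and the exception class are placeholders, not the integration's code:

```python
import asyncio
from collections.abc import Awaitable, Callable
from functools import wraps

DEFAULT_RETRY_AFTER = 0.05  # the integration defaults to 60 seconds


class RateLimitedError(Exception):
    def __init__(self, retry_after: float | None = None) -> None:
        self.retry_after = retry_after


def retry_when_rate_limited(
    func: Callable[..., Awaitable[None]],
) -> Callable[..., Awaitable[None]]:
    """Run func now; if the API reports a rate limit, schedule another attempt later."""

    @wraps(func)
    async def wrapper(*args, **kwargs) -> None:
        try:
            await func(*args, **kwargs)
        except RateLimitedError as err:
            delay = err.retry_after or DEFAULT_RETRY_AFTER
            loop = asyncio.get_running_loop()
            # Mirrors async_call_later: run the same handler again after the delay.
            loop.call_later(delay, lambda: loop.create_task(wrapper(*args, **kwargs)))

    return wrapper


calls = {"attempts": 0}


@retry_when_rate_limited
async def fetch_constraints() -> None:
    calls["attempts"] += 1
    if calls["attempts"] == 1:
        raise RateLimitedError(0.01)
    print("constraints fetched on attempt", calls["attempts"])


async def main() -> None:
    await fetch_constraints()
    await asyncio.sleep(0.1)  # let the rescheduled attempt run


asyncio.run(main())
```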

View File

@ -25,7 +25,7 @@ from .const import (
UNIT_MAP, UNIT_MAP,
) )
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher
from .utils import get_dict_from_home_connect_error from .utils import get_dict_from_home_connect_error
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -189,19 +189,25 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity):
}, },
) from err ) from err
@constraint_fetcher
async def async_fetch_constraints(self) -> None: async def async_fetch_constraints(self) -> None:
"""Fetch the max and min values and step for the number entity.""" """Fetch the max and min values and step for the number entity."""
try: setting_key = cast(SettingKey, self.bsh_key)
data = self.appliance.settings.get(setting_key)
if not data or not data.unit or not data.constraints:
data = await self.coordinator.client.get_setting( data = await self.coordinator.client.get_setting(
self.appliance.info.ha_id, setting_key=SettingKey(self.bsh_key) self.appliance.info.ha_id, setting_key=setting_key
) )
except HomeConnectError as err: if data.unit:
_LOGGER.error("An error occurred: %s", err) self._attr_native_unit_of_measurement = data.unit
else:
self.set_constraints(data) self.set_constraints(data)
def set_constraints(self, setting: GetSetting) -> None: def set_constraints(self, setting: GetSetting) -> None:
"""Set constraints for the number entity.""" """Set constraints for the number entity."""
if setting.unit:
self._attr_native_unit_of_measurement = UNIT_MAP.get(
setting.unit, setting.unit
)
if not (constraints := setting.constraints): if not (constraints := setting.constraints):
return return
if constraints.max: if constraints.max:
@ -222,10 +228,10 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity):
"""When entity is added to hass.""" """When entity is added to hass."""
await super().async_added_to_hass() await super().async_added_to_hass()
data = self.appliance.settings[cast(SettingKey, self.bsh_key)] data = self.appliance.settings[cast(SettingKey, self.bsh_key)]
self._attr_native_unit_of_measurement = data.unit
self.set_constraints(data) self.set_constraints(data)
if ( if (
not hasattr(self, "_attr_native_min_value") not hasattr(self, "_attr_native_unit_of_measurement")
or not hasattr(self, "_attr_native_min_value")
or not hasattr(self, "_attr_native_max_value") or not hasattr(self, "_attr_native_max_value")
or not hasattr(self, "_attr_native_step") or not hasattr(self, "_attr_native_step")
): ):
@ -253,7 +259,6 @@ class HomeConnectOptionNumberEntity(HomeConnectOptionEntity, NumberEntity):
or candidate_unit != self._attr_native_unit_of_measurement or candidate_unit != self._attr_native_unit_of_measurement
): ):
self._attr_native_unit_of_measurement = candidate_unit self._attr_native_unit_of_measurement = candidate_unit
self.__dict__.pop("unit_of_measurement", None)
option_constraints = option_definition.constraints option_constraints = option_definition.constraints
if option_constraints: if option_constraints:
if ( if (

View File

@ -1,8 +1,8 @@
"""Provides a select platform for Home Connect.""" """Provides a select platform for Home Connect."""
from collections.abc import Callable, Coroutine from collections.abc import Callable, Coroutine
import contextlib
from dataclasses import dataclass from dataclasses import dataclass
import logging
from typing import Any, cast from typing import Any, cast
from aiohomeconnect.client import Client as HomeConnectClient from aiohomeconnect.client import Client as HomeConnectClient
@ -47,9 +47,11 @@ from .coordinator import (
HomeConnectConfigEntry, HomeConnectConfigEntry,
HomeConnectCoordinator, HomeConnectCoordinator,
) )
from .entity import HomeConnectEntity, HomeConnectOptionEntity from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher
from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 1 PARALLEL_UPDATES = 1
FUNCTIONAL_LIGHT_COLOR_TEMPERATURE_ENUM = { FUNCTIONAL_LIGHT_COLOR_TEMPERATURE_ENUM = {
@ -413,6 +415,7 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity):
"""Select setting class for Home Connect.""" """Select setting class for Home Connect."""
entity_description: HomeConnectSelectEntityDescription entity_description: HomeConnectSelectEntityDescription
_original_option_keys: set[str | None]
def __init__( def __init__(
self, self,
@ -421,6 +424,7 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity):
desc: HomeConnectSelectEntityDescription, desc: HomeConnectSelectEntityDescription,
) -> None: ) -> None:
"""Initialize the entity.""" """Initialize the entity."""
self._original_option_keys = set(desc.values_translation_key)
super().__init__( super().__init__(
coordinator, coordinator,
appliance, appliance,
@ -458,23 +462,29 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity):
async def async_added_to_hass(self) -> None: async def async_added_to_hass(self) -> None:
"""When entity is added to hass.""" """When entity is added to hass."""
await super().async_added_to_hass() await super().async_added_to_hass()
await self.async_fetch_options()
@constraint_fetcher
async def async_fetch_options(self) -> None:
"""Fetch options from the API."""
setting = self.appliance.settings.get(cast(SettingKey, self.bsh_key)) setting = self.appliance.settings.get(cast(SettingKey, self.bsh_key))
if ( if (
not setting not setting
or not setting.constraints or not setting.constraints
or not setting.constraints.allowed_values or not setting.constraints.allowed_values
): ):
with contextlib.suppress(HomeConnectError):
setting = await self.coordinator.client.get_setting( setting = await self.coordinator.client.get_setting(
self.appliance.info.ha_id, self.appliance.info.ha_id,
setting_key=cast(SettingKey, self.bsh_key), setting_key=cast(SettingKey, self.bsh_key),
) )
if setting and setting.constraints and setting.constraints.allowed_values: if setting and setting.constraints and setting.constraints.allowed_values:
self._original_option_keys = set(setting.constraints.allowed_values)
self._attr_options = [ self._attr_options = [
self.entity_description.values_translation_key[option] self.entity_description.values_translation_key[option]
for option in setting.constraints.allowed_values for option in self._original_option_keys
if option in self.entity_description.values_translation_key if option is not None
and option in self.entity_description.values_translation_key
] ]
@ -491,7 +501,7 @@ class HomeConnectSelectOptionEntity(HomeConnectOptionEntity, SelectEntity):
desc: HomeConnectSelectEntityDescription, desc: HomeConnectSelectEntityDescription,
) -> None: ) -> None:
"""Initialize the entity.""" """Initialize the entity."""
self._original_option_keys = set(desc.values_translation_key.keys()) self._original_option_keys = set(desc.values_translation_key)
super().__init__( super().__init__(
coordinator, coordinator,
appliance, appliance,
@ -524,5 +534,5 @@ class HomeConnectSelectOptionEntity(HomeConnectOptionEntity, SelectEntity):
self.entity_description.values_translation_key[option] self.entity_description.values_translation_key[option]
for option in self._original_option_keys for option in self._original_option_keys
if option is not None if option is not None
and option in self.entity_description.values_translation_key
] ]
self.__dict__.pop("options", None)

View File

@ -1,12 +1,11 @@
"""Provides a sensor for Home Connect.""" """Provides a sensor for Home Connect."""
import contextlib
from dataclasses import dataclass from dataclasses import dataclass
from datetime import timedelta from datetime import timedelta
import logging
from typing import cast from typing import cast
from aiohomeconnect.model import EventKey, StatusKey from aiohomeconnect.model import EventKey, StatusKey
from aiohomeconnect.model.error import HomeConnectError
from homeassistant.components.sensor import ( from homeassistant.components.sensor import (
SensorDeviceClass, SensorDeviceClass,
@ -28,7 +27,9 @@ from .const import (
UNIT_MAP, UNIT_MAP,
) )
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity from .entity import HomeConnectEntity, constraint_fetcher
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0 PARALLEL_UPDATES = 0
@ -335,16 +336,14 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
else: else:
await self.fetch_unit() await self.fetch_unit()
@constraint_fetcher
async def fetch_unit(self) -> None: async def fetch_unit(self) -> None:
"""Fetch the unit of measurement.""" """Fetch the unit of measurement."""
with contextlib.suppress(HomeConnectError):
data = await self.coordinator.client.get_status_value( data = await self.coordinator.client.get_status_value(
self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key) self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key)
) )
if data.unit: if data.unit:
self._attr_native_unit_of_measurement = UNIT_MAP.get( self._attr_native_unit_of_measurement = UNIT_MAP.get(data.unit, data.unit)
data.unit, data.unit
)
class HomeConnectProgramSensor(HomeConnectSensor): class HomeConnectProgramSensor(HomeConnectSensor):

View File

@ -468,11 +468,11 @@ set_program_and_options:
translation_key: venting_level translation_key: venting_level
options: options:
- cooking_hood_enum_type_stage_fan_off - cooking_hood_enum_type_stage_fan_off
- cooking_hood_enum_type_stage_fan_stage01 - cooking_hood_enum_type_stage_fan_stage_01
- cooking_hood_enum_type_stage_fan_stage02 - cooking_hood_enum_type_stage_fan_stage_02
- cooking_hood_enum_type_stage_fan_stage03 - cooking_hood_enum_type_stage_fan_stage_03
- cooking_hood_enum_type_stage_fan_stage04 - cooking_hood_enum_type_stage_fan_stage_04
- cooking_hood_enum_type_stage_fan_stage05 - cooking_hood_enum_type_stage_fan_stage_05
cooking_hood_option_intensive_level: cooking_hood_option_intensive_level:
example: cooking_hood_enum_type_intensive_stage_intensive_stage1 example: cooking_hood_enum_type_intensive_stage_intensive_stage1
required: false required: false
@ -528,7 +528,7 @@ set_program_and_options:
collapsed: true collapsed: true
fields: fields:
laundry_care_washer_option_temperature: laundry_care_washer_option_temperature:
example: laundry_care_washer_enum_type_temperature_g_c40 example: laundry_care_washer_enum_type_temperature_g_c_40
required: false required: false
selector: selector:
select: select:
@ -536,14 +536,14 @@ set_program_and_options:
translation_key: washer_temperature translation_key: washer_temperature
options: options:
- laundry_care_washer_enum_type_temperature_cold - laundry_care_washer_enum_type_temperature_cold
- laundry_care_washer_enum_type_temperature_g_c20 - laundry_care_washer_enum_type_temperature_g_c_20
- laundry_care_washer_enum_type_temperature_g_c30 - laundry_care_washer_enum_type_temperature_g_c_30
- laundry_care_washer_enum_type_temperature_g_c40 - laundry_care_washer_enum_type_temperature_g_c_40
- laundry_care_washer_enum_type_temperature_g_c50 - laundry_care_washer_enum_type_temperature_g_c_50
- laundry_care_washer_enum_type_temperature_g_c60 - laundry_care_washer_enum_type_temperature_g_c_60
- laundry_care_washer_enum_type_temperature_g_c70 - laundry_care_washer_enum_type_temperature_g_c_70
- laundry_care_washer_enum_type_temperature_g_c80 - laundry_care_washer_enum_type_temperature_g_c_80
- laundry_care_washer_enum_type_temperature_g_c90 - laundry_care_washer_enum_type_temperature_g_c_90
- laundry_care_washer_enum_type_temperature_ul_cold - laundry_care_washer_enum_type_temperature_ul_cold
- laundry_care_washer_enum_type_temperature_ul_warm - laundry_care_washer_enum_type_temperature_ul_warm
- laundry_care_washer_enum_type_temperature_ul_hot - laundry_care_washer_enum_type_temperature_ul_hot
@ -557,15 +557,15 @@ set_program_and_options:
translation_key: spin_speed translation_key: spin_speed
options: options:
- laundry_care_washer_enum_type_spin_speed_off - laundry_care_washer_enum_type_spin_speed_off
- laundry_care_washer_enum_type_spin_speed_r_p_m400 - laundry_care_washer_enum_type_spin_speed_r_p_m_400
- laundry_care_washer_enum_type_spin_speed_r_p_m600 - laundry_care_washer_enum_type_spin_speed_r_p_m_600
- laundry_care_washer_enum_type_spin_speed_r_p_m700 - laundry_care_washer_enum_type_spin_speed_r_p_m_700
- laundry_care_washer_enum_type_spin_speed_r_p_m800 - laundry_care_washer_enum_type_spin_speed_r_p_m_800
- laundry_care_washer_enum_type_spin_speed_r_p_m900 - laundry_care_washer_enum_type_spin_speed_r_p_m_900
- laundry_care_washer_enum_type_spin_speed_r_p_m1000 - laundry_care_washer_enum_type_spin_speed_r_p_m_1000
- laundry_care_washer_enum_type_spin_speed_r_p_m1200 - laundry_care_washer_enum_type_spin_speed_r_p_m_1200
- laundry_care_washer_enum_type_spin_speed_r_p_m1400 - laundry_care_washer_enum_type_spin_speed_r_p_m_1400
- laundry_care_washer_enum_type_spin_speed_r_p_m1600 - laundry_care_washer_enum_type_spin_speed_r_p_m_1600
- laundry_care_washer_enum_type_spin_speed_ul_off - laundry_care_washer_enum_type_spin_speed_ul_off
- laundry_care_washer_enum_type_spin_speed_ul_low - laundry_care_washer_enum_type_spin_speed_ul_low
- laundry_care_washer_enum_type_spin_speed_ul_medium - laundry_care_washer_enum_type_spin_speed_ul_medium
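The renamed option keys above (stage_fan_stage01 to stage_fan_stage_01, g_c40 to g_c_40, r_p_m400 to r_p_m_400) come from separating digits from letters when BSH enum values are turned into translation keys. A hedged sketch of one way such a conversion could work; this is not the integration's bsh_key_to_translation_key, and the raw enum values shown are assumptions:

```python
import re


def to_translation_key(bsh_value: str) -> str:
    """Convert a BSH-style enum value to a snake_case key, splitting letter/digit runs."""
    key = bsh_value.replace(".", "_")
    key = re.sub(r"(?<=[A-Za-z0-9])(?=[A-Z])", "_", key)  # LaundryCare -> Laundry_Care, RPM -> R_P_M
    key = re.sub(r"(?<=[A-Za-z])(?=\d)", "_", key)  # RPM400 -> RPM_400, Stage01 -> Stage_01
    return key.lower()


print(to_translation_key("LaundryCare.Washer.EnumType.SpinSpeed.RPM400"))
# laundry_care_washer_enum_type_spin_speed_r_p_m_400
print(to_translation_key("Cooking.Hood.EnumType.Stage.FanStage01"))
# cooking_hood_enum_type_stage_fan_stage_01
```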

View File

@ -417,11 +417,11 @@
"venting_level": { "venting_level": {
"options": { "options": {
"cooking_hood_enum_type_stage_fan_off": "Fan off", "cooking_hood_enum_type_stage_fan_off": "Fan off",
"cooking_hood_enum_type_stage_fan_stage01": "Fan stage 1", "cooking_hood_enum_type_stage_fan_stage_01": "Fan stage 1",
"cooking_hood_enum_type_stage_fan_stage02": "Fan stage 2", "cooking_hood_enum_type_stage_fan_stage_02": "Fan stage 2",
"cooking_hood_enum_type_stage_fan_stage03": "Fan stage 3", "cooking_hood_enum_type_stage_fan_stage_03": "Fan stage 3",
"cooking_hood_enum_type_stage_fan_stage04": "Fan stage 4", "cooking_hood_enum_type_stage_fan_stage_04": "Fan stage 4",
"cooking_hood_enum_type_stage_fan_stage05": "Fan stage 5" "cooking_hood_enum_type_stage_fan_stage_05": "Fan stage 5"
} }
}, },
"intensive_level": { "intensive_level": {
@ -441,14 +441,14 @@
"washer_temperature": { "washer_temperature": {
"options": { "options": {
"laundry_care_washer_enum_type_temperature_cold": "Cold", "laundry_care_washer_enum_type_temperature_cold": "Cold",
"laundry_care_washer_enum_type_temperature_g_c20": "20ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_20": "20ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c30": "30ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_30": "30ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c40": "40ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_40": "40ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c50": "50ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_50": "50ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c60": "60ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_60": "60ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c70": "70ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_70": "70ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c80": "80ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_80": "80ºC clothes",
"laundry_care_washer_enum_type_temperature_g_c90": "90ºC clothes", "laundry_care_washer_enum_type_temperature_g_c_90": "90ºC clothes",
"laundry_care_washer_enum_type_temperature_ul_cold": "Cold", "laundry_care_washer_enum_type_temperature_ul_cold": "Cold",
"laundry_care_washer_enum_type_temperature_ul_warm": "Warm", "laundry_care_washer_enum_type_temperature_ul_warm": "Warm",
"laundry_care_washer_enum_type_temperature_ul_hot": "Hot", "laundry_care_washer_enum_type_temperature_ul_hot": "Hot",
@ -458,15 +458,15 @@
"spin_speed": { "spin_speed": {
"options": { "options": {
"laundry_care_washer_enum_type_spin_speed_off": "Off", "laundry_care_washer_enum_type_spin_speed_off": "Off",
"laundry_care_washer_enum_type_spin_speed_r_p_m400": "400 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "400 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m600": "600 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "600 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m700": "700 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "700 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m800": "800 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_800": "800 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m900": "900 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_900": "900 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m1000": "1000 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1000": "1000 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m1200": "1200 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "1200 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m1400": "1400 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "1400 rpm",
"laundry_care_washer_enum_type_spin_speed_r_p_m1600": "1600 rpm", "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "1600 rpm",
"laundry_care_washer_enum_type_spin_speed_ul_off": "Off", "laundry_care_washer_enum_type_spin_speed_ul_off": "Off",
"laundry_care_washer_enum_type_spin_speed_ul_low": "Low", "laundry_care_washer_enum_type_spin_speed_ul_low": "Low",
"laundry_care_washer_enum_type_spin_speed_ul_medium": "Medium", "laundry_care_washer_enum_type_spin_speed_ul_medium": "Medium",
@ -1384,11 +1384,11 @@
"name": "[%key:component::home_connect::services::set_program_and_options::fields::cooking_hood_option_venting_level::name%]", "name": "[%key:component::home_connect::services::set_program_and_options::fields::cooking_hood_option_venting_level::name%]",
"state": { "state": {
"cooking_hood_enum_type_stage_fan_off": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_off%]", "cooking_hood_enum_type_stage_fan_off": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_off%]",
"cooking_hood_enum_type_stage_fan_stage01": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage01%]", "cooking_hood_enum_type_stage_fan_stage_01": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_01%]",
"cooking_hood_enum_type_stage_fan_stage02": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage02%]", "cooking_hood_enum_type_stage_fan_stage_02": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_02%]",
"cooking_hood_enum_type_stage_fan_stage03": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage03%]", "cooking_hood_enum_type_stage_fan_stage_03": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_03%]",
"cooking_hood_enum_type_stage_fan_stage04": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage04%]", "cooking_hood_enum_type_stage_fan_stage_04": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_04%]",
"cooking_hood_enum_type_stage_fan_stage05": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage05%]" "cooking_hood_enum_type_stage_fan_stage_05": "[%key:component::home_connect::selector::venting_level::options::cooking_hood_enum_type_stage_fan_stage_05%]"
} }
}, },
"intensive_level": { "intensive_level": {
@ -1411,14 +1411,14 @@
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_temperature::name%]", "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_temperature::name%]",
"state": { "state": {
"laundry_care_washer_enum_type_temperature_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_cold%]", "laundry_care_washer_enum_type_temperature_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_cold%]",
"laundry_care_washer_enum_type_temperature_g_c20": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c20%]", "laundry_care_washer_enum_type_temperature_g_c_20": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_20%]",
"laundry_care_washer_enum_type_temperature_g_c30": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c30%]", "laundry_care_washer_enum_type_temperature_g_c_30": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_30%]",
"laundry_care_washer_enum_type_temperature_g_c40": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c40%]", "laundry_care_washer_enum_type_temperature_g_c_40": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_40%]",
"laundry_care_washer_enum_type_temperature_g_c50": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c50%]", "laundry_care_washer_enum_type_temperature_g_c_50": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_50%]",
"laundry_care_washer_enum_type_temperature_g_c60": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c60%]", "laundry_care_washer_enum_type_temperature_g_c_60": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_60%]",
"laundry_care_washer_enum_type_temperature_g_c70": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c70%]", "laundry_care_washer_enum_type_temperature_g_c_70": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_70%]",
"laundry_care_washer_enum_type_temperature_g_c80": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c80%]", "laundry_care_washer_enum_type_temperature_g_c_80": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_80%]",
"laundry_care_washer_enum_type_temperature_g_c90": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c90%]", "laundry_care_washer_enum_type_temperature_g_c_90": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_g_c_90%]",
"laundry_care_washer_enum_type_temperature_ul_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_cold%]", "laundry_care_washer_enum_type_temperature_ul_cold": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_cold%]",
"laundry_care_washer_enum_type_temperature_ul_warm": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_warm%]", "laundry_care_washer_enum_type_temperature_ul_warm": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_warm%]",
"laundry_care_washer_enum_type_temperature_ul_hot": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_hot%]", "laundry_care_washer_enum_type_temperature_ul_hot": "[%key:component::home_connect::selector::washer_temperature::options::laundry_care_washer_enum_type_temperature_ul_hot%]",
@ -1429,15 +1429,15 @@
"name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_spin_speed::name%]", "name": "[%key:component::home_connect::services::set_program_and_options::fields::laundry_care_washer_option_spin_speed::name%]",
"state": { "state": {
"laundry_care_washer_enum_type_spin_speed_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_off%]", "laundry_care_washer_enum_type_spin_speed_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_off%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m400%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_400%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m600%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_600%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m700": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m700%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_700": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_700%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m800": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m800%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_800": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_800%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m900": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m900%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_900": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_900%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m1000": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1000%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1000": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1000%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m1200": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1200%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1200": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1200%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m1400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1400%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1400": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1400%]",
"laundry_care_washer_enum_type_spin_speed_r_p_m1600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m1600%]", "laundry_care_washer_enum_type_spin_speed_r_p_m_1600": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_r_p_m_1600%]",
"laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_off%]", "laundry_care_washer_enum_type_spin_speed_ul_off": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_off%]",
"laundry_care_washer_enum_type_spin_speed_ul_low": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_low%]", "laundry_care_washer_enum_type_spin_speed_ul_low": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_low%]",
"laundry_care_washer_enum_type_spin_speed_ul_medium": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_medium%]", "laundry_care_washer_enum_type_spin_speed_ul_medium": "[%key:component::home_connect::selector::spin_speed::options::laundry_care_washer_enum_type_spin_speed_ul_medium%]",

View File

@ -188,7 +188,7 @@
}, },
"reload_all": { "reload_all": {
"name": "Reload all", "name": "Reload all",
"description": "Reload all YAML configuration that can be reloaded without restarting Home Assistant." "description": "Reloads all YAML configuration that can be reloaded without restarting Home Assistant."
} }
}, },
"exceptions": { "exceptions": {

View File

@ -15,6 +15,7 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
PLATFORMS = [ PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.BUTTON, Platform.BUTTON,
Platform.COVER, Platform.COVER,
Platform.LIGHT, Platform.LIGHT,

View File

@ -0,0 +1,190 @@
"""The Homee binary sensor platform."""
from pyHomee.const import AttributeType
from pyHomee.model import HomeeAttribute
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HomeeConfigEntry
from .entity import HomeeEntity
PARALLEL_UPDATES = 0
BINARY_SENSOR_DESCRIPTIONS: dict[AttributeType, BinarySensorEntityDescription] = {
AttributeType.BATTERY_LOW_ALARM: BinarySensorEntityDescription(
key="battery",
device_class=BinarySensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.BLACKOUT_ALARM: BinarySensorEntityDescription(
key="blackout_alarm",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.COALARM: BinarySensorEntityDescription(
key="carbon_monoxide", device_class=BinarySensorDeviceClass.CO
),
AttributeType.CO2ALARM: BinarySensorEntityDescription(
key="carbon_dioxide", device_class=BinarySensorDeviceClass.PROBLEM
),
AttributeType.FLOOD_ALARM: BinarySensorEntityDescription(
key="flood",
device_class=BinarySensorDeviceClass.MOISTURE,
),
AttributeType.HIGH_TEMPERATURE_ALARM: BinarySensorEntityDescription(
key="high_temperature",
device_class=BinarySensorDeviceClass.HEAT,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.LEAK_ALARM: BinarySensorEntityDescription(
key="leak_alarm",
device_class=BinarySensorDeviceClass.PROBLEM,
),
AttributeType.LOAD_ALARM: BinarySensorEntityDescription(
key="load_alarm",
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.LOCK_STATE: BinarySensorEntityDescription(
key="lock",
device_class=BinarySensorDeviceClass.LOCK,
),
AttributeType.LOW_TEMPERATURE_ALARM: BinarySensorEntityDescription(
key="low_temperature",
device_class=BinarySensorDeviceClass.COLD,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.MALFUNCTION_ALARM: BinarySensorEntityDescription(
key="malfunction",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.MAXIMUM_ALARM: BinarySensorEntityDescription(
key="maximum",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.MINIMUM_ALARM: BinarySensorEntityDescription(
key="minimum",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.MOTION_ALARM: BinarySensorEntityDescription(
key="motion",
device_class=BinarySensorDeviceClass.MOTION,
),
AttributeType.MOTOR_BLOCKED_ALARM: BinarySensorEntityDescription(
key="motor_blocked",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.ON_OFF: BinarySensorEntityDescription(
key="plug",
device_class=BinarySensorDeviceClass.PLUG,
),
AttributeType.OPEN_CLOSE: BinarySensorEntityDescription(
key="opening",
device_class=BinarySensorDeviceClass.OPENING,
),
AttributeType.OVER_CURRENT_ALARM: BinarySensorEntityDescription(
key="overcurrent",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.OVERLOAD_ALARM: BinarySensorEntityDescription(
key="overload",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.PRESENCE_ALARM: BinarySensorEntityDescription(
key="presence",
device_class=BinarySensorDeviceClass.PRESENCE,
),
AttributeType.POWER_SUPPLY_ALARM: BinarySensorEntityDescription(
key="power",
device_class=BinarySensorDeviceClass.POWER,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.RAIN_FALL: BinarySensorEntityDescription(
key="rain",
device_class=BinarySensorDeviceClass.MOISTURE,
),
AttributeType.REPLACE_FILTER_ALARM: BinarySensorEntityDescription(
key="replace_filter",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.SMOKE_ALARM: BinarySensorEntityDescription(
key="smoke",
device_class=BinarySensorDeviceClass.SMOKE,
),
AttributeType.STORAGE_ALARM: BinarySensorEntityDescription(
key="storage",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.SURGE_ALARM: BinarySensorEntityDescription(
key="surge",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.TAMPER_ALARM: BinarySensorEntityDescription(
key="tamper",
device_class=BinarySensorDeviceClass.TAMPER,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.VOLTAGE_DROP_ALARM: BinarySensorEntityDescription(
key="voltage_drop",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
AttributeType.WATER_ALARM: BinarySensorEntityDescription(
key="water",
device_class=BinarySensorDeviceClass.MOISTURE,
entity_category=EntityCategory.DIAGNOSTIC,
),
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: HomeeConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Add the Homee platform for the binary sensor component."""
async_add_devices(
HomeeBinarySensor(
attribute, config_entry, BINARY_SENSOR_DESCRIPTIONS[attribute.type]
)
for node in config_entry.runtime_data.nodes
for attribute in node.attributes
if attribute.type in BINARY_SENSOR_DESCRIPTIONS and not attribute.editable
)
class HomeeBinarySensor(HomeeEntity, BinarySensorEntity):
"""Representation of a Homee binary sensor."""
def __init__(
self,
attribute: HomeeAttribute,
entry: HomeeConfigEntry,
description: BinarySensorEntityDescription,
) -> None:
"""Initialize a Homee binary sensor entity."""
super().__init__(attribute, entry)
self.entity_description = description
self._attr_translation_key = description.key
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return bool(self._attribute.current_value)
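For readability, the generator expression passed to async_add_devices above can be read as the following explicit loop (a sketch only, reusing the names defined in the platform module above; note that only non-editable attributes with a matching description become binary sensors, editable ones are presumably exposed through controllable platforms instead):

entities: list[HomeeBinarySensor] = []
for node in config_entry.runtime_data.nodes:
    for attribute in node.attributes:
        # Skip attributes without a description and writable (editable) attributes.
        if attribute.type in BINARY_SENSOR_DESCRIPTIONS and not attribute.editable:
            entities.append(
                HomeeBinarySensor(
                    attribute, config_entry, BINARY_SENSOR_DESCRIPTIONS[attribute.type]
                )
            )
async_add_devices(entities)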

View File

@ -8,5 +8,5 @@
"iot_class": "local_push", "iot_class": "local_push",
"loggers": ["homee"], "loggers": ["homee"],
"quality_scale": "bronze", "quality_scale": "bronze",
"requirements": ["pyHomee==1.2.7"] "requirements": ["pyHomee==1.2.8"]
} }

View File

@ -26,6 +26,76 @@
} }
}, },
"entity": { "entity": {
"binary_sensor": {
"blackout_alarm": {
"name": "Blackout"
},
"carbon_dioxide": {
"name": "Carbon dioxide"
},
"flood": {
"name": "Flood"
},
"high_temperature": {
"name": "High temperature"
},
"leak_alarm": {
"name": "Leak"
},
"load_alarm": {
"name": "Load",
"state": {
"off": "Normal",
"on": "Overload"
}
},
"low_temperature": {
"name": "Low temperature"
},
"malfunction": {
"name": "Malfunction"
},
"maximum": {
"name": "Maximum level"
},
"minimum": {
"name": "Minimum level"
},
"motor_blocked": {
"name": "Motor blocked"
},
"overcurrent": {
"name": "Overcurrent"
},
"overload": {
"name": "Overload"
},
"rain": {
"name": "Rain"
},
"replace_filter": {
"name": "Replace filter",
"state": {
"on": "Replace"
}
},
"storage": {
"name": "Storage",
"state": {
"off": "Space available",
"on": "Storage full"
}
},
"surge": {
"name": "Surge"
},
"voltage_drop": {
"name": "Voltage drop"
},
"water": {
"name": "Water"
}
},
"button": { "button": {
"automatic_mode": { "automatic_mode": {
"name": "Automatic mode" "name": "Automatic mode"

View File

@ -83,7 +83,7 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity):
@property @property
def supported_features(self) -> MediaPlayerEntityFeature: def supported_features(self) -> MediaPlayerEntityFeature:
"""Flag media player features that are supported.""" """Flag media player features that are supported."""
features = MediaPlayerEntityFeature(0) features = MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON
if self.service.has(CharacteristicsTypes.ACTIVE_IDENTIFIER): if self.service.has(CharacteristicsTypes.ACTIVE_IDENTIFIER):
features |= MediaPlayerEntityFeature.SELECT_SOURCE features |= MediaPlayerEntityFeature.SELECT_SOURCE
@ -177,6 +177,14 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity):
return MediaPlayerState.ON return MediaPlayerState.ON
async def async_turn_on(self) -> None:
"""Turn the tv on."""
await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 1})
async def async_turn_off(self) -> None:
"""Turn the tv off."""
await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 0})
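Because supported_features above now starts from TURN_OFF | TURN_ON instead of an empty flag, every HomeKit television advertises power control and these two methods become reachable. A minimal illustration of the flag union, using only the MediaPlayerEntityFeature members that appear in this diff:

from homeassistant.components.media_player import MediaPlayerEntityFeature

features = MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON
assert MediaPlayerEntityFeature.TURN_ON in features
assert MediaPlayerEntityFeature.SELECT_SOURCE not in features  # added only if the accessory supports it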
async def async_media_play(self) -> None: async def async_media_play(self) -> None:
"""Send play command.""" """Send play command."""
if self.state == MediaPlayerState.PLAYING: if self.state == MediaPlayerState.PLAYING:

View File

@ -35,7 +35,7 @@
"services": { "services": {
"activate_eco_mode_with_duration": { "activate_eco_mode_with_duration": {
"name": "Activate eco mode with duration", "name": "Activate eco mode with duration",
"description": "Activates eco mode with period.", "description": "Activates the eco mode for a specified duration.",
"fields": { "fields": {
"duration": { "duration": {
"name": "Duration", "name": "Duration",
@ -49,7 +49,7 @@
}, },
"activate_eco_mode_with_period": { "activate_eco_mode_with_period": {
"name": "Activate eco more with period", "name": "Activate eco more with period",
"description": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::description%]", "description": "Activates the eco mode until a given time.",
"fields": { "fields": {
"endtime": { "endtime": {
"name": "Endtime", "name": "Endtime",
@ -63,7 +63,7 @@
}, },
"activate_vacation": { "activate_vacation": {
"name": "Activate vacation", "name": "Activate vacation",
"description": "Activates the vacation mode until the given time.", "description": "Activates the vacation mode until a given time.",
"fields": { "fields": {
"endtime": { "endtime": {
"name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_period::fields::endtime::name%]", "name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_period::fields::endtime::name%]",

View File

@ -89,7 +89,7 @@
"fields": { "fields": {
"mode": { "mode": {
"name": "Mode", "name": "Mode",
"description": "Operation mode. For example, _normal_, _eco_, or _away_. For a list of possible values, refer to the integration documentation." "description": "Operation mode. For example, \"normal\", \"eco\", or \"away\". For a list of possible values, refer to the integration documentation."
} }
} }
}, },

View File

@ -13,7 +13,7 @@ from aioautomower.exceptions import (
HusqvarnaTimeoutError, HusqvarnaTimeoutError,
HusqvarnaWSServerHandshakeError, HusqvarnaWSServerHandshakeError,
) )
from aioautomower.model import MowerAttributes from aioautomower.model import MowerDictionary
from aioautomower.session import AutomowerSession from aioautomower.session import AutomowerSession
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
@ -32,7 +32,7 @@ DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time
type AutomowerConfigEntry = ConfigEntry[AutomowerDataUpdateCoordinator] type AutomowerConfigEntry = ConfigEntry[AutomowerDataUpdateCoordinator]
class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]):
"""Class to manage fetching Husqvarna data.""" """Class to manage fetching Husqvarna data."""
config_entry: AutomowerConfigEntry config_entry: AutomowerConfigEntry
@ -61,7 +61,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
self._zones_last_update: dict[str, set[str]] = {} self._zones_last_update: dict[str, set[str]] = {}
self._areas_last_update: dict[str, set[int]] = {} self._areas_last_update: dict[str, set[int]] = {}
async def _async_update_data(self) -> dict[str, MowerAttributes]: async def _async_update_data(self) -> MowerDictionary:
"""Subscribe for websocket and poll data from the API.""" """Subscribe for websocket and poll data from the API."""
if not self.ws_connected: if not self.ws_connected:
await self.api.connect() await self.api.connect()
@ -84,7 +84,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
return data return data
@callback @callback
def callback(self, ws_data: dict[str, MowerAttributes]) -> None: def callback(self, ws_data: MowerDictionary) -> None:
"""Process websocket callbacks and write them to the DataUpdateCoordinator.""" """Process websocket callbacks and write them to the DataUpdateCoordinator."""
self.async_set_updated_data(ws_data) self.async_set_updated_data(ws_data)
@ -119,7 +119,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
"reconnect_task", "reconnect_task",
) )
def _async_add_remove_devices(self, data: dict[str, MowerAttributes]) -> None: def _async_add_remove_devices(self, data: MowerDictionary) -> None:
"""Add new device, remove non-existing device.""" """Add new device, remove non-existing device."""
current_devices = set(data) current_devices = set(data)
@ -159,9 +159,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
for mower_callback in self.new_devices_callbacks: for mower_callback in self.new_devices_callbacks:
mower_callback(new_devices) mower_callback(new_devices)
def _async_add_remove_stay_out_zones( def _async_add_remove_stay_out_zones(self, data: MowerDictionary) -> None:
self, data: dict[str, MowerAttributes]
) -> None:
"""Add new stay-out zones, remove non-existing stay-out zones.""" """Add new stay-out zones, remove non-existing stay-out zones."""
current_zones = { current_zones = {
mower_id: set(mower_data.stay_out_zones.zones) mower_id: set(mower_data.stay_out_zones.zones)
@ -207,7 +205,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib
return current_zones return current_zones
def _async_add_remove_work_areas(self, data: dict[str, MowerAttributes]) -> None: def _async_add_remove_work_areas(self, data: MowerDictionary) -> None:
"""Add new work areas, remove non-existing work areas.""" """Add new work areas, remove non-existing work areas."""
current_areas = { current_areas = {
mower_id: set(mower_data.work_areas) mower_id: set(mower_data.work_areas)
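MowerDictionary itself is not shown in this diff; given that it replaces dict[str, MowerAttributes] one-for-one in every annotation above, it is presumably just a named alias exported by aioautomower.model, along the lines of:

# Assumption, not verified against aioautomower:
# MowerDictionary = dict[str, MowerAttributes]  # keyed by mower ID
# i.e. the coordinator's data shape is unchanged; only the annotations get shorter.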

View File

@ -8,5 +8,5 @@
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["aioautomower"], "loggers": ["aioautomower"],
"quality_scale": "silver", "quality_scale": "silver",
"requirements": ["aioautomower==2025.1.1"] "requirements": ["aioautomower==2025.3.1"]
} }

View File

@ -38,7 +38,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ImgwPibConfigEntry) -> b
hydrological_details=False, hydrological_details=False,
) )
except (ClientError, TimeoutError, ApiError) as err: except (ClientError, TimeoutError, ApiError) as err:
raise ConfigEntryNotReady from err raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={
"entry": entry.title,
"error": repr(err),
},
) from err
coordinator = ImgwPibDataUpdateCoordinator(hass, entry, imgwpib, station_id) coordinator = ImgwPibDataUpdateCoordinator(hass, entry, imgwpib, station_id)
await coordinator.async_config_entry_first_refresh() await coordinator.async_config_entry_first_refresh()

View File

@ -63,4 +63,11 @@ class ImgwPibDataUpdateCoordinator(DataUpdateCoordinator[HydrologicalData]):
try: try:
return await self.imgwpib.get_hydrological_data() return await self.imgwpib.get_hydrological_data()
except ApiError as err: except ApiError as err:
raise UpdateFailed(err) from err raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={
"entry": self.config_entry.title,
"error": repr(err),
},
) from err

View File

@ -25,5 +25,13 @@
"name": "Water temperature" "name": "Water temperature"
} }
} }
},
"exceptions": {
"cannot_connect": {
"message": "An error occurred while connecting to the IMGW-PIB API for {entry}: {error}"
},
"update_error": {
"message": "An error occurred while retrieving data from the IMGW-PIB API for {entry}: {error}"
}
} }
} }
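The translation_placeholders passed to ConfigEntryNotReady and UpdateFailed above are substituted into these message templates when the exception is rendered; the effect is roughly plain str.format, shown here with illustrative values (the entry title and error text are made up):

template = (
    "An error occurred while retrieving data from the IMGW-PIB API "
    "for {entry}: {error}"
)
print(template.format(entry="Vistula station", error="ApiError('timeout')"))
# -> An error occurred while retrieving data from the IMGW-PIB API for Vistula station: ApiError('timeout')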

View File

@ -8,6 +8,7 @@ from iometer import IOmeterClient, IOmeterConnectionError, Reading, Status
from homeassistant.config_entries import ConfigEntry from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN from .const import DOMAIN
@ -31,6 +32,7 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]):
config_entry: IOmeterConfigEntry config_entry: IOmeterConfigEntry
client: IOmeterClient client: IOmeterClient
current_fw_version: str = ""
def __init__( def __init__(
self, self,
@ -58,4 +60,17 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]):
except IOmeterConnectionError as error: except IOmeterConnectionError as error:
raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error
fw_version = f"{status.device.core.version}/{status.device.bridge.version}"
if self.current_fw_version and fw_version != self.current_fw_version:
device_registry = dr.async_get(self.hass)
device_entry = device_registry.async_get_device(
identifiers={(DOMAIN, status.device.id)}
)
assert device_entry
device_registry.async_update_device(
device_entry.id,
sw_version=fw_version,
)
self.current_fw_version = fw_version
return IOmeterData(reading=reading, status=status) return IOmeterData(reading=reading, status=status)

View File

@ -20,5 +20,5 @@ class IOmeterEntity(CoordinatorEntity[IOMeterCoordinator]):
identifiers={(DOMAIN, status.device.id)}, identifiers={(DOMAIN, status.device.id)},
manufacturer="IOmeter GmbH", manufacturer="IOmeter GmbH",
model="IOmeter", model="IOmeter",
sw_version=f"{status.device.core.version}/{status.device.bridge.version}", sw_version=coordinator.current_fw_version,
) )

View File

@ -4,7 +4,7 @@
"user": { "user": {
"description": "Fill out your U.S. or Canadian ZIP code.", "description": "Fill out your U.S. or Canadian ZIP code.",
"data": { "data": {
"zip_code": "ZIP Code" "zip_code": "ZIP code"
} }
} }
}, },

View File

@ -12,7 +12,7 @@
"requirements": [ "requirements": [
"xknx==3.6.0", "xknx==3.6.0",
"xknxproject==3.8.2", "xknxproject==3.8.2",
"knx-frontend==2025.1.30.194235" "knx-frontend==2025.3.8.214559"
], ],
"single_config_entry": true "single_config_entry": true
} }

View File

@ -114,7 +114,7 @@ BINARY_SENSOR_SCHEMA = vol.Schema(
), ),
vol.Optional(CONF_RESET_AFTER): selector.NumberSelector( vol.Optional(CONF_RESET_AFTER): selector.NumberSelector(
selector.NumberSelectorConfig( selector.NumberSelectorConfig(
min=0, max=10, step=0.1, unit_of_measurement="s" min=0, max=600, step=0.1, unit_of_measurement="s"
) )
), ),
}, },

View File

@ -61,6 +61,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
client=client, client=client,
) )
# initialize the firmware update coordinator early to check the firmware version
firmware_device = LaMarzoccoMachine(
model=entry.data[CONF_MODEL],
serial_number=entry.unique_id,
name=entry.data[CONF_NAME],
cloud_client=cloud_client,
)
firmware_coordinator = LaMarzoccoFirmwareUpdateCoordinator(
hass, entry, firmware_device
)
await firmware_coordinator.async_config_entry_first_refresh()
gateway_version = version.parse(
firmware_device.firmware[FirmwareType.GATEWAY].current_version
)
if gateway_version >= version.parse("v5.0.9"):
# remove host from config entry, it is not supported anymore
data = {k: v for k, v in entry.data.items() if k != CONF_HOST}
hass.config_entries.async_update_entry(
entry,
data=data,
)
elif gateway_version < version.parse("v3.4-rc5"):
# incompatible gateway firmware, create an issue
ir.async_create_issue(
hass,
DOMAIN,
"unsupported_gateway_firmware",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="unsupported_gateway_firmware",
translation_placeholders={"gateway_version": str(gateway_version)},
)
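Both firmware gates above rely on packaging's PEP 440 ordering, which understands the "v" prefix and the "-rc" pre-release suffix used by the gateway versions; a quick self-contained check of the comparisons being made (values mirror the thresholds in the code):

from packaging import version

assert version.parse("v3.4-rc4") < version.parse("v3.4-rc5")  # older RC firmware -> repair issue is created
assert version.parse("v3.4") > version.parse("v3.4-rc5")      # a final 3.4 release already passes the gate
assert version.parse("v5.0.9") >= version.parse("v5.0.9")     # threshold at which CONF_HOST is dropped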
# initialize local API # initialize local API
local_client: LaMarzoccoLocalClient | None = None local_client: LaMarzoccoLocalClient | None = None
if (host := entry.data.get(CONF_HOST)) is not None: if (host := entry.data.get(CONF_HOST)) is not None:
@ -117,30 +153,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
coordinators = LaMarzoccoRuntimeData( coordinators = LaMarzoccoRuntimeData(
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client), LaMarzoccoConfigUpdateCoordinator(hass, entry, device, local_client),
LaMarzoccoFirmwareUpdateCoordinator(hass, entry, device), firmware_coordinator,
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device), LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
) )
# API does not like concurrent requests, so no asyncio.gather here # API does not like concurrent requests, so no asyncio.gather here
await coordinators.config_coordinator.async_config_entry_first_refresh() await coordinators.config_coordinator.async_config_entry_first_refresh()
await coordinators.firmware_coordinator.async_config_entry_first_refresh()
await coordinators.statistics_coordinator.async_config_entry_first_refresh() await coordinators.statistics_coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinators entry.runtime_data = coordinators
gateway_version = device.firmware[FirmwareType.GATEWAY].current_version
if version.parse(gateway_version) < version.parse("v3.4-rc5"):
# incompatible gateway firmware, create an issue
ir.async_create_issue(
hass,
DOMAIN,
"unsupported_gateway_firmware",
is_fixable=False,
severity=ir.IssueSeverity.ERROR,
translation_key="unsupported_gateway_firmware",
translation_placeholders={"gateway_version": gateway_version},
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
async def update_listener( async def update_listener(

View File

@ -37,5 +37,5 @@
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["pylamarzocco"], "loggers": ["pylamarzocco"],
"quality_scale": "platinum", "quality_scale": "platinum",
"requirements": ["pylamarzocco==1.4.7"] "requirements": ["pylamarzocco==1.4.9"]
} }

View File

@ -144,9 +144,12 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = (
set_value_fn=lambda machine, value, key: machine.set_prebrew_time( set_value_fn=lambda machine, value, key: machine.set_prebrew_time(
prebrew_off_time=value, key=key prebrew_off_time=value, key=key
), ),
native_value_fn=lambda config, key: config.prebrew_configuration[key].off_time, native_value_fn=lambda config, key: config.prebrew_configuration[key][
0
].off_time,
available_fn=lambda device: len(device.config.prebrew_configuration) > 0 available_fn=lambda device: len(device.config.prebrew_configuration) > 0
and device.config.prebrew_mode == PrebrewMode.PREBREW, and device.config.prebrew_mode
in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED),
supported_fn=lambda coordinator: coordinator.device.model supported_fn=lambda coordinator: coordinator.device.model
!= MachineModel.GS3_MP, != MachineModel.GS3_MP,
), ),
@ -162,9 +165,12 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = (
set_value_fn=lambda machine, value, key: machine.set_prebrew_time( set_value_fn=lambda machine, value, key: machine.set_prebrew_time(
prebrew_on_time=value, key=key prebrew_on_time=value, key=key
), ),
native_value_fn=lambda config, key: config.prebrew_configuration[key].off_time, native_value_fn=lambda config, key: config.prebrew_configuration[key][
0
].off_time,
available_fn=lambda device: len(device.config.prebrew_configuration) > 0 available_fn=lambda device: len(device.config.prebrew_configuration) > 0
and device.config.prebrew_mode == PrebrewMode.PREBREW, and device.config.prebrew_mode
in (PrebrewMode.PREBREW, PrebrewMode.PREBREW_ENABLED),
supported_fn=lambda coordinator: coordinator.device.model supported_fn=lambda coordinator: coordinator.device.model
!= MachineModel.GS3_MP, != MachineModel.GS3_MP,
), ),
@ -180,8 +186,8 @@ KEY_ENTITIES: tuple[LaMarzoccoKeyNumberEntityDescription, ...] = (
set_value_fn=lambda machine, value, key: machine.set_preinfusion_time( set_value_fn=lambda machine, value, key: machine.set_preinfusion_time(
preinfusion_time=value, key=key preinfusion_time=value, key=key
), ),
native_value_fn=lambda config, key: config.prebrew_configuration[ native_value_fn=lambda config, key: config.prebrew_configuration[key][
key 1
].preinfusion_time, ].preinfusion_time,
available_fn=lambda device: len(device.config.prebrew_configuration) > 0 available_fn=lambda device: len(device.config.prebrew_configuration) > 0
and device.config.prebrew_mode == PrebrewMode.PREINFUSION, and device.config.prebrew_mode == PrebrewMode.PREINFUSION,

View File

@ -38,6 +38,7 @@ STEAM_LEVEL_LM_TO_HA = {value: key for key, value in STEAM_LEVEL_HA_TO_LM.items(
PREBREW_MODE_HA_TO_LM = { PREBREW_MODE_HA_TO_LM = {
"disabled": PrebrewMode.DISABLED, "disabled": PrebrewMode.DISABLED,
"prebrew": PrebrewMode.PREBREW, "prebrew": PrebrewMode.PREBREW,
"prebrew_enabled": PrebrewMode.PREBREW_ENABLED,
"preinfusion": PrebrewMode.PREINFUSION, "preinfusion": PrebrewMode.PREINFUSION,
} }

View File

@ -148,6 +148,7 @@
"state": { "state": {
"disabled": "Disabled", "disabled": "Disabled",
"prebrew": "Prebrew", "prebrew": "Prebrew",
"prebrew_enabled": "Prebrew",
"preinfusion": "Preinfusion" "preinfusion": "Preinfusion"
} }
}, },

View File

@ -396,19 +396,19 @@
}, },
"address_to_device_id": { "address_to_device_id": {
"name": "Address to device ID", "name": "Address to device ID",
"description": "Convert LCN address to device ID.", "description": "Converts an LCN address into a device ID.",
"fields": { "fields": {
"id": { "id": {
"name": "Module or group ID", "name": "Module or group ID",
"description": "Target module or group ID." "description": "Module or group number of the target."
}, },
"segment_id": { "segment_id": {
"name": "Segment ID", "name": "Segment ID",
"description": "Target segment ID." "description": "Segment number of the target."
}, },
"type": { "type": {
"name": "Type", "name": "Type",
"description": "Target type." "description": "Module type of the target."
}, },
"host": { "host": {
"name": "Host name", "name": "Host name",

View File

@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/lg_thinq", "documentation": "https://www.home-assistant.io/integrations/lg_thinq",
"iot_class": "cloud_push", "iot_class": "cloud_push",
"loggers": ["thinqconnect"], "loggers": ["thinqconnect"],
"requirements": ["thinqconnect==1.0.4"] "requirements": ["thinqconnect==1.0.5"]
} }

View File

@ -77,31 +77,31 @@
"status_code": { "status_code": {
"name": "Status code", "name": "Status code",
"state": { "state": {
"br": "Bonnet Removed", "br": "Bonnet removed",
"ccc": "Clean Cycle Complete", "ccc": "Clean cycle complete",
"ccp": "Clean Cycle In Progress", "ccp": "Clean cycle in progress",
"cd": "Cat Detected", "cd": "Cat detected",
"csf": "Cat Sensor Fault", "csf": "Cat sensor fault",
"csi": "Cat Sensor Interrupted", "csi": "Cat sensor interrupted",
"cst": "Cat Sensor Timing", "cst": "Cat sensor timing",
"df1": "Drawer Almost Full - 2 Cycles Left", "df1": "Drawer almost full - 2 cycles left",
"df2": "Drawer Almost Full - 1 Cycle Left", "df2": "Drawer almost full - 1 cycle left",
"dfs": "Drawer Full", "dfs": "Drawer full",
"dhf": "Dump + Home Position Fault", "dhf": "Dump + home position fault",
"dpf": "Dump Position Fault", "dpf": "Dump position fault",
"ec": "Empty Cycle", "ec": "Empty cycle",
"hpf": "Home Position Fault", "hpf": "Home position fault",
"off": "[%key:common::state::off%]", "off": "[%key:common::state::off%]",
"offline": "Offline", "offline": "Offline",
"otf": "Over Torque Fault", "otf": "Over torque fault",
"p": "[%key:common::state::paused%]", "p": "[%key:common::state::paused%]",
"pd": "Pinch Detect", "pd": "Pinch detect",
"pwrd": "Powering Down", "pwrd": "Powering down",
"pwru": "Powering Up", "pwru": "Powering up",
"rdy": "Ready", "rdy": "Ready",
"scf": "Cat Sensor Fault At Startup", "scf": "Cat sensor fault at startup",
"sdf": "Drawer Full At Startup", "sdf": "Drawer full at startup",
"spf": "Pinch Detect At Startup" "spf": "Pinch detect at startup"
} }
}, },
"waste_drawer": { "waste_drawer": {

View File

@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar", "documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling", "iot_class": "local_polling",
"loggers": ["ical"], "loggers": ["ical"],
"requirements": ["ical==8.3.0"] "requirements": ["ical==9.0.1"]
} }

View File

@ -5,5 +5,5 @@
"config_flow": true, "config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo", "documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling", "iot_class": "local_polling",
"requirements": ["ical==8.3.0"] "requirements": ["ical==9.0.1"]
} }

View File

@ -6,7 +6,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.typing import ConfigType
from . import http, llm_api from . import http
from .const import DOMAIN from .const import DOMAIN
from .session import SessionManager from .session import SessionManager
from .types import MCPServerConfigEntry from .types import MCPServerConfigEntry
@ -25,7 +25,6 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Model Context Protocol component.""" """Set up the Model Context Protocol component."""
http.async_register(hass) http.async_register(hass)
llm_api.async_register_api(hass)
return True return True

View File

@ -16,7 +16,7 @@ from homeassistant.helpers.selector import (
SelectSelectorConfig, SelectSelectorConfig,
) )
from .const import DOMAIN, LLM_API, LLM_API_NAME from .const import DOMAIN
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -33,13 +33,6 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult: ) -> ConfigFlowResult:
"""Handle the initial step.""" """Handle the initial step."""
llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)} llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)}
if LLM_API not in llm_apis:
# MCP server component is not loaded yet, so make the LLM API a choice.
llm_apis = {
LLM_API: LLM_API_NAME,
**llm_apis,
}
if user_input is not None: if user_input is not None:
return self.async_create_entry( return self.async_create_entry(
title=llm_apis[user_input[CONF_LLM_HASS_API]], data=user_input title=llm_apis[user_input[CONF_LLM_HASS_API]], data=user_input

View File

@ -2,5 +2,6 @@
DOMAIN = "mcp_server" DOMAIN = "mcp_server"
TITLE = "Model Context Protocol Server" TITLE = "Model Context Protocol Server"
LLM_API = "stateless_assist" # The Stateless API is no longer registered explicitly, but this name may still exist in the
LLM_API_NAME = "Stateless Assist" # users config entry.
STATELESS_LLM_API = "stateless_assist"

View File

@ -1,19 +1,18 @@
"""LLM API for MCP Server.""" """LLM API for MCP Server.
from homeassistant.core import HomeAssistant, callback This is a modified version of the AssistAPI that does not include the home state
in the prompt. This API is not registered with the LLM API registry since it is
only used by the MCP Server. The MCP server will substitute this API when the
user selects the Assist API.
"""
from homeassistant.core import callback
from homeassistant.helpers import llm from homeassistant.helpers import llm
from homeassistant.util import yaml as yaml_util from homeassistant.util import yaml as yaml_util
from .const import LLM_API, LLM_API_NAME
EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"} EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"}
def async_register_api(hass: HomeAssistant) -> None:
"""Register the LLM API."""
llm.async_register_api(hass, StatelessAssistAPI(hass))
class StatelessAssistAPI(llm.AssistAPI): class StatelessAssistAPI(llm.AssistAPI):
"""LLM API for MCP Server that provides the Assist API without state information in the prompt. """LLM API for MCP Server that provides the Assist API without state information in the prompt.
@ -22,12 +21,6 @@ class StatelessAssistAPI(llm.AssistAPI):
actions don't care about the current state, there is little quality loss. actions don't care about the current state, there is little quality loss.
""" """
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the StatelessAssistAPI."""
super().__init__(hass)
self.id = LLM_API
self.name = LLM_API_NAME
@callback @callback
def _async_get_exposed_entities_prompt( def _async_get_exposed_entities_prompt(
self, llm_context: llm.LLMContext, exposed_entities: dict | None self, llm_context: llm.LLMContext, exposed_entities: dict | None

View File

@ -21,6 +21,9 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import llm from homeassistant.helpers import llm
from .const import STATELESS_LLM_API
from .llm_api import StatelessAssistAPI
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -50,13 +53,21 @@ async def create_server(
server = Server("home-assistant") server = Server("home-assistant")
async def get_api_instance() -> llm.APIInstance:
"""Substitute the StatelessAssistAPI for the Assist API if selected."""
if llm_api_id in (STATELESS_LLM_API, llm.LLM_API_ASSIST):
api = StatelessAssistAPI(hass)
return await api.async_get_api_instance(llm_context)
return await llm.async_get_api(hass, llm_api_id, llm_context)
@server.list_prompts() # type: ignore[no-untyped-call, misc] @server.list_prompts() # type: ignore[no-untyped-call, misc]
async def handle_list_prompts() -> list[types.Prompt]: async def handle_list_prompts() -> list[types.Prompt]:
llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) llm_api = await get_api_instance()
return [ return [
types.Prompt( types.Prompt(
name=llm_api.api.name, name=llm_api.api.name,
description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", description=f"Default prompt for Home Assistant {llm_api.api.name} API",
) )
] ]
@ -64,12 +75,12 @@ async def create_server(
async def handle_get_prompt( async def handle_get_prompt(
name: str, arguments: dict[str, str] | None name: str, arguments: dict[str, str] | None
) -> types.GetPromptResult: ) -> types.GetPromptResult:
llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) llm_api = await get_api_instance()
if name != llm_api.api.name: if name != llm_api.api.name:
raise ValueError(f"Unknown prompt: {name}") raise ValueError(f"Unknown prompt: {name}")
return types.GetPromptResult( return types.GetPromptResult(
description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", description=f"Default prompt for Home Assistant {llm_api.api.name} API",
messages=[ messages=[
types.PromptMessage( types.PromptMessage(
role="assistant", role="assistant",
@ -84,13 +95,13 @@ async def create_server(
@server.list_tools() # type: ignore[no-untyped-call, misc] @server.list_tools() # type: ignore[no-untyped-call, misc]
async def list_tools() -> list[types.Tool]: async def list_tools() -> list[types.Tool]:
"""List available time tools.""" """List available time tools."""
llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) llm_api = await get_api_instance()
return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools] return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools]
@server.call_tool() # type: ignore[no-untyped-call, misc] @server.call_tool() # type: ignore[no-untyped-call, misc]
async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]: async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]:
"""Handle calling tools.""" """Handle calling tools."""
llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) llm_api = await get_api_instance()
tool_input = llm.ToolInput(tool_name=name, tool_args=arguments) tool_input = llm.ToolInput(tool_name=name, tool_args=arguments)
_LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args) _LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args)

View File

@ -146,11 +146,11 @@
"services": { "services": {
"get_mealplan": { "get_mealplan": {
"name": "Get mealplan", "name": "Get mealplan",
"description": "Get mealplan from Mealie", "description": "Gets a mealplan from Mealie",
"fields": { "fields": {
"config_entry_id": { "config_entry_id": {
"name": "Mealie instance", "name": "Mealie instance",
"description": "Select the Mealie instance to get mealplan from" "description": "The Mealie instance to use for this action."
}, },
"start_date": { "start_date": {
"name": "Start date", "name": "Start date",
@ -164,7 +164,7 @@
}, },
"get_recipe": { "get_recipe": {
"name": "Get recipe", "name": "Get recipe",
"description": "Get recipe from Mealie", "description": "Gets a recipe from Mealie",
"fields": { "fields": {
"config_entry_id": { "config_entry_id": {
"name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]",
@ -178,7 +178,7 @@
}, },
"import_recipe": { "import_recipe": {
"name": "Import recipe", "name": "Import recipe",
"description": "Import recipe from an URL", "description": "Imports a recipe from an URL",
"fields": { "fields": {
"config_entry_id": { "config_entry_id": {
"name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]",
@ -196,7 +196,7 @@
}, },
"set_random_mealplan": { "set_random_mealplan": {
"name": "Set random mealplan", "name": "Set random mealplan",
"description": "Set a random mealplan for a specific date", "description": "Sets a random mealplan for a specific date",
"fields": { "fields": {
"config_entry_id": { "config_entry_id": {
"name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]",
@ -214,7 +214,7 @@
}, },
"set_mealplan": { "set_mealplan": {
"name": "Set a mealplan", "name": "Set a mealplan",
"description": "Set a mealplan for a specific date", "description": "Sets a mealplan for a specific date",
"fields": { "fields": {
"config_entry_id": { "config_entry_id": {
"name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]",

View File

@ -5,5 +5,5 @@
"config_flow": true, "config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/moehlenhoff_alpha2", "documentation": "https://www.home-assistant.io/integrations/moehlenhoff_alpha2",
"iot_class": "local_push", "iot_class": "local_push",
"requirements": ["moehlenhoff-alpha2==1.3.1"] "requirements": ["moehlenhoff-alpha2==1.4.0"]
} }

View File

@ -150,6 +150,7 @@ ABBREVIATIONS = {
"pl_rst_pct": "payload_reset_percentage", "pl_rst_pct": "payload_reset_percentage",
"pl_rst_pr_mode": "payload_reset_preset_mode", "pl_rst_pr_mode": "payload_reset_preset_mode",
"pl_stop": "payload_stop", "pl_stop": "payload_stop",
"pl_stop_tilt": "payload_stop_tilt",
"pl_strt": "payload_start", "pl_strt": "payload_start",
"pl_ret": "payload_return_to_base", "pl_ret": "payload_return_to_base",
"pl_toff": "payload_turn_off", "pl_toff": "payload_turn_off",

View File

@ -1022,8 +1022,6 @@ class MQTT:
Resubscribe to all topics we were subscribed to and publish birth Resubscribe to all topics we were subscribed to and publish birth
message. message.
""" """
# pylint: disable-next=import-outside-toplevel
if reason_code.is_failure: if reason_code.is_failure:
# 24: Continue authentication # 24: Continue authentication
# 25: Re-authenticate # 25: Re-authenticate

View File

@ -81,6 +81,7 @@ CONF_TILT_STATUS_TOPIC = "tilt_status_topic"
CONF_TILT_STATUS_TEMPLATE = "tilt_status_template" CONF_TILT_STATUS_TEMPLATE = "tilt_status_template"
CONF_STATE_STOPPED = "state_stopped" CONF_STATE_STOPPED = "state_stopped"
CONF_PAYLOAD_STOP_TILT = "payload_stop_tilt"
CONF_TILT_CLOSED_POSITION = "tilt_closed_value" CONF_TILT_CLOSED_POSITION = "tilt_closed_value"
CONF_TILT_MAX = "tilt_max" CONF_TILT_MAX = "tilt_max"
CONF_TILT_MIN = "tilt_min" CONF_TILT_MIN = "tilt_min"
@ -203,6 +204,9 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend(
vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template, vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template,
vol.Optional(CONF_TILT_COMMAND_TEMPLATE): cv.template, vol.Optional(CONF_TILT_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_PAYLOAD_STOP_TILT, default=DEFAULT_PAYLOAD_STOP): vol.Any(
cv.string, None
),
} }
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
@ -592,6 +596,12 @@ class MqttCover(MqttEntity, CoverEntity):
self._attr_current_cover_tilt_position = tilt_percentage self._attr_current_cover_tilt_position = tilt_percentage
self.async_write_ha_state() self.async_write_ha_state()
async def async_stop_cover_tilt(self, **kwargs: Any) -> None:
"""Stop moving the cover tilt."""
await self.async_publish_with_config(
self._config[CONF_TILT_COMMAND_TOPIC], self._config[CONF_PAYLOAD_STOP_TILT]
)
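With payload_stop_tilt wired into the schema and async_stop_cover_tilt above, covers set up over MQTT discovery can use the new option; the "pl_stop_tilt" abbreviation registered earlier in this diff maps to it. A hedged sketch of such a discovery payload (topics and payload values are examples only):

import json

blind_config = {
    "name": "Blind",
    "cmd_t": "blind/set",            # command_topic
    "tilt_cmd_t": "blind/tilt/set",  # tilt_command_topic
    "pl_stop": "STOP",               # payload_stop
    "pl_stop_tilt": "STOP_TILT",     # payload_stop_tilt; schema default is DEFAULT_PAYLOAD_STOP
}
print(json.dumps(blind_config))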
async def async_set_cover_position(self, **kwargs: Any) -> None: async def async_set_cover_position(self, **kwargs: Any) -> None:
"""Move the cover to a specific position.""" """Move the cover to a specific position."""
position_percentage = kwargs[ATTR_POSITION] position_percentage = kwargs[ATTR_POSITION]

View File

@ -362,7 +362,7 @@
"fields": { "fields": {
"evaluate_payload": { "evaluate_payload": {
"name": "Evaluate payload", "name": "Evaluate payload",
"description": "When `payload` is a Python bytes literal, evaluate the bytes literal and publish the raw data." "description": "If 'Payload' is a Python bytes literal, evaluate the bytes literal and publish the raw data."
}, },
"topic": { "topic": {
"name": "Topic", "name": "Topic",

View File

@ -101,6 +101,17 @@
"medium": "Medium", "medium": "Medium",
"high": "High", "high": "High",
"very_high": "Very high" "very_high": "Very high"
},
"state_attributes": {
"options": {
"state": {
"very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]",
"low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]",
"medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]",
"high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]",
"very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]"
}
}
} }
}, },
"pmsx003_pm1": { "pmsx003_pm1": {
@ -123,6 +134,17 @@
"medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]",
"high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]",
"very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]"
},
"state_attributes": {
"options": {
"state": {
"very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]",
"low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]",
"medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]",
"high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]",
"very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]"
}
}
} }
}, },
"sds011_pm10": { "sds011_pm10": {
@ -148,6 +170,17 @@
"medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]",
"high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]",
"very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]"
},
"state_attributes": {
"options": {
"state": {
"very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]",
"low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]",
"medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]",
"high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]",
"very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]"
}
}
} }
}, },
"sps30_pm1": { "sps30_pm1": {

View File

@ -88,7 +88,7 @@
"name": "Cache start time" "name": "Cache start time"
}, },
"nextcloud_cache_ttl": { "nextcloud_cache_ttl": {
"name": "Cache ttl" "name": "Cache TTL"
}, },
"nextcloud_database_size": { "nextcloud_database_size": {
"name": "Database size" "name": "Database size"
@ -268,13 +268,13 @@
"name": "Updates available" "name": "Updates available"
}, },
"nextcloud_system_cpuload_1": { "nextcloud_system_cpuload_1": {
"name": "CPU Load last 1 minute" "name": "CPU load last 1 minute"
}, },
"nextcloud_system_cpuload_15": { "nextcloud_system_cpuload_15": {
"name": "CPU Load last 15 minutes" "name": "CPU load last 15 minutes"
}, },
"nextcloud_system_cpuload_5": { "nextcloud_system_cpuload_5": {
"name": "CPU Load last 5 minutes" "name": "CPU load last 5 minutes"
}, },
"nextcloud_system_freespace": { "nextcloud_system_freespace": {
"name": "Free space" "name": "Free space"

View File

@ -36,6 +36,7 @@ from .const import (
ATTR_SETTINGS, ATTR_SETTINGS,
ATTR_STATUS, ATTR_STATUS,
CONF_PROFILE_ID, CONF_PROFILE_ID,
DOMAIN,
UPDATE_INTERVAL_ANALYTICS, UPDATE_INTERVAL_ANALYTICS,
UPDATE_INTERVAL_CONNECTION, UPDATE_INTERVAL_CONNECTION,
UPDATE_INTERVAL_SETTINGS, UPDATE_INTERVAL_SETTINGS,
@ -88,9 +89,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: NextDnsConfigEntry) -> b
try: try:
nextdns = await NextDns.create(websession, api_key) nextdns = await NextDns.create(websession, api_key)
except (ApiError, ClientConnectorError, RetryError, TimeoutError) as err: except (ApiError, ClientConnectorError, RetryError, TimeoutError) as err:
raise ConfigEntryNotReady from err raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={
"entry": entry.title,
"error": repr(err),
},
) from err
except InvalidApiKeyError as err: except InvalidApiKeyError as err:
raise ConfigEntryAuthFailed from err raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={"entry": entry.title},
) from err
tasks = [] tasks = []
coordinators = {} coordinators = {}

View File

@ -2,15 +2,19 @@
from __future__ import annotations from __future__ import annotations
from nextdns import AnalyticsStatus from aiohttp import ClientError
from aiohttp.client_exceptions import ClientConnectorError
from nextdns import AnalyticsStatus, ApiError, InvalidApiKeyError
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.const import EntityCategory from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import NextDnsConfigEntry from . import NextDnsConfigEntry
from .const import DOMAIN
from .coordinator import NextDnsUpdateCoordinator from .coordinator import NextDnsUpdateCoordinator
PARALLEL_UPDATES = 1 PARALLEL_UPDATES = 1
@ -53,4 +57,21 @@ class NextDnsButton(
async def async_press(self) -> None: async def async_press(self) -> None:
"""Trigger cleaning logs.""" """Trigger cleaning logs."""
try:
await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id) await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id)
except (
ApiError,
ClientConnectorError,
TimeoutError,
ClientError,
) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="method_error",
translation_placeholders={
"entity": self.entity_id,
"error": repr(err),
},
) from err
except InvalidApiKeyError:
self.coordinator.config_entry.async_start_reauth(self.hass)

View File

@ -79,9 +79,20 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]):
ClientConnectorError, ClientConnectorError,
RetryError, RetryError,
) as err: ) as err:
raise UpdateFailed(err) from err raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_error",
translation_placeholders={
"entry": self.config_entry.title,
"error": repr(err),
},
) from err
except InvalidApiKeyError as err: except InvalidApiKeyError as err:
raise ConfigEntryAuthFailed from err raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={"entry": self.config_entry.title},
) from err
async def _async_update_data_internal(self) -> CoordinatorDataT: async def _async_update_data_internal(self) -> CoordinatorDataT:
"""Update data via library.""" """Update data via library."""

View File

@ -359,5 +359,19 @@
"name": "Force YouTube restricted mode" "name": "Force YouTube restricted mode"
} }
} }
},
"exceptions": {
"auth_error": {
"message": "Authentication failed for {entry}, please update your API key"
},
"cannot_connect": {
"message": "An error occurred while connecting to the NextDNS API for {entry}: {error}"
},
"method_error": {
"message": "An error occurred while calling the NextDNS API method for {entity}: {error}"
},
"update_error": {
"message": "An error occurred while retrieving data from the NextDNS API for {entry}: {error}"
}
} }
} }

Some files were not shown because too many files have changed in this diff.